var/home/core/zuul-output/logs/kubelet.log
Dec 08 21:18:42 crc systemd[1]: Starting Kubernetes Kubelet...
Dec 08 21:18:42 crc restorecon[4676]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized
by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c24 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c574,c582 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c138,c778 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 
Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c84,c419 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c108,c511 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c12,c18 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 08 21:18:42 crc 
restorecon[4676]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 08 21:18:42 crc 
restorecon[4676]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc 
restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc 
restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 08 21:18:42 
crc restorecon[4676]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 08 
21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 
21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:42 crc 
restorecon[4676]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 
21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:42 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 
21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc 
restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 21:18:43 crc restorecon[4676]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 08 21:18:43 crc restorecon[4676]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 08 21:18:43 crc restorecon[4676]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Dec 08 21:18:43 crc kubenswrapper[4791]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 08 21:18:43 crc kubenswrapper[4791]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Dec 08 21:18:43 crc kubenswrapper[4791]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 08 21:18:43 crc kubenswrapper[4791]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Dec 08 21:18:43 crc kubenswrapper[4791]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Dec 08 21:18:43 crc kubenswrapper[4791]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.424071 4791 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426654 4791 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426673 4791 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426678 4791 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426682 4791 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426687 4791 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426691 4791 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426695 4791 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426698 4791 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426702 4791 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426719 4791 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426722 4791 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426727 4791 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426731 4791 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426734 4791 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426738 4791 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426741 4791 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426746 4791 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426749 4791 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426758 4791 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426763 4791 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426768 4791 feature_gate.go:330] 
unrecognized feature gate: AWSClusterHostedDNS Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426773 4791 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426777 4791 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426783 4791 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426788 4791 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426793 4791 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426799 4791 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426804 4791 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426809 4791 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426814 4791 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426819 4791 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426823 4791 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426827 4791 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426831 4791 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426836 4791 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426842 4791 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426847 4791 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426851 4791 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426855 4791 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426858 4791 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426862 4791 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426866 4791 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426870 4791 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426874 4791 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426878 4791 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426882 4791 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426886 4791 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426889 4791 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426893 4791 feature_gate.go:330] unrecognized feature gate: Example Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426897 4791 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426901 4791 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426906 4791 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426911 4791 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426916 4791 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426922 4791 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426926 4791 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426930 4791 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426934 4791 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426938 4791 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426941 4791 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426946 4791 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426950 4791 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426953 4791 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426956 4791 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426960 4791 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426963 4791 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426967 4791 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426970 4791 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426974 4791 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426977 4791 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.426982 4791 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427067 4791 flags.go:64] FLAG: --address="0.0.0.0" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427075 4791 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427081 4791 flags.go:64] FLAG: --anonymous-auth="true" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427087 4791 flags.go:64] FLAG: --application-metrics-count-limit="100" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427092 4791 flags.go:64] FLAG: --authentication-token-webhook="false" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427097 4791 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427102 4791 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427107 4791 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427112 4791 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427116 4791 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427121 4791 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427126 4791 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427130 4791 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427134 4791 flags.go:64] FLAG: --cgroup-root="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427138 4791 flags.go:64] FLAG: --cgroups-per-qos="true" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427143 4791 flags.go:64] FLAG: --client-ca-file="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427148 4791 flags.go:64] FLAG: --cloud-config="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427152 4791 flags.go:64] FLAG: --cloud-provider="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427156 4791 flags.go:64] FLAG: --cluster-dns="[]" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427163 4791 flags.go:64] FLAG: --cluster-domain="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427167 4791 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427171 4791 flags.go:64] FLAG: --config-dir="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427175 4791 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427179 4791 flags.go:64] FLAG: --container-log-max-files="5" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427185 4791 flags.go:64] FLAG: --container-log-max-size="10Mi" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427189 4791 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427195 4791 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427199 4791 flags.go:64] FLAG: --containerd-namespace="k8s.io" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427203 4791 flags.go:64] FLAG: --contention-profiling="false" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 
21:18:43.427207 4791 flags.go:64] FLAG: --cpu-cfs-quota="true" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427211 4791 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427216 4791 flags.go:64] FLAG: --cpu-manager-policy="none" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427220 4791 flags.go:64] FLAG: --cpu-manager-policy-options="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427225 4791 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427229 4791 flags.go:64] FLAG: --enable-controller-attach-detach="true" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427233 4791 flags.go:64] FLAG: --enable-debugging-handlers="true" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427237 4791 flags.go:64] FLAG: --enable-load-reader="false" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427241 4791 flags.go:64] FLAG: --enable-server="true" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427246 4791 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427250 4791 flags.go:64] FLAG: --event-burst="100" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427254 4791 flags.go:64] FLAG: --event-qps="50" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427258 4791 flags.go:64] FLAG: --event-storage-age-limit="default=0" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427263 4791 flags.go:64] FLAG: --event-storage-event-limit="default=0" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427267 4791 flags.go:64] FLAG: --eviction-hard="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427272 4791 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427276 4791 flags.go:64] FLAG: --eviction-minimum-reclaim="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427280 4791 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427285 4791 flags.go:64] FLAG: --eviction-soft="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427289 4791 flags.go:64] FLAG: --eviction-soft-grace-period="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427293 4791 flags.go:64] FLAG: --exit-on-lock-contention="false" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427296 4791 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427301 4791 flags.go:64] FLAG: --experimental-mounter-path="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427305 4791 flags.go:64] FLAG: --fail-cgroupv1="false" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427309 4791 flags.go:64] FLAG: --fail-swap-on="true" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427313 4791 flags.go:64] FLAG: --feature-gates="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427319 4791 flags.go:64] FLAG: --file-check-frequency="20s" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427323 4791 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427327 4791 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427332 4791 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 
21:18:43.427336 4791 flags.go:64] FLAG: --healthz-port="10248" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427340 4791 flags.go:64] FLAG: --help="false" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427344 4791 flags.go:64] FLAG: --hostname-override="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427348 4791 flags.go:64] FLAG: --housekeeping-interval="10s" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427352 4791 flags.go:64] FLAG: --http-check-frequency="20s" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427357 4791 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427361 4791 flags.go:64] FLAG: --image-credential-provider-config="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427365 4791 flags.go:64] FLAG: --image-gc-high-threshold="85" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427368 4791 flags.go:64] FLAG: --image-gc-low-threshold="80" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427372 4791 flags.go:64] FLAG: --image-service-endpoint="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427376 4791 flags.go:64] FLAG: --kernel-memcg-notification="false" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427380 4791 flags.go:64] FLAG: --kube-api-burst="100" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427384 4791 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427388 4791 flags.go:64] FLAG: --kube-api-qps="50" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427393 4791 flags.go:64] FLAG: --kube-reserved="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427397 4791 flags.go:64] FLAG: --kube-reserved-cgroup="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427401 4791 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427405 4791 flags.go:64] FLAG: --kubelet-cgroups="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427409 4791 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427413 4791 flags.go:64] FLAG: --lock-file="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427417 4791 flags.go:64] FLAG: --log-cadvisor-usage="false" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427422 4791 flags.go:64] FLAG: --log-flush-frequency="5s" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427425 4791 flags.go:64] FLAG: --log-json-info-buffer-size="0" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427432 4791 flags.go:64] FLAG: --log-json-split-stream="false" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427436 4791 flags.go:64] FLAG: --log-text-info-buffer-size="0" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427440 4791 flags.go:64] FLAG: --log-text-split-stream="false" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427444 4791 flags.go:64] FLAG: --logging-format="text" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427450 4791 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427455 4791 flags.go:64] FLAG: --make-iptables-util-chains="true" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427459 4791 flags.go:64] FLAG: --manifest-url="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427463 4791 
flags.go:64] FLAG: --manifest-url-header="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427468 4791 flags.go:64] FLAG: --max-housekeeping-interval="15s" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427473 4791 flags.go:64] FLAG: --max-open-files="1000000" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427478 4791 flags.go:64] FLAG: --max-pods="110" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427482 4791 flags.go:64] FLAG: --maximum-dead-containers="-1" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427486 4791 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427491 4791 flags.go:64] FLAG: --memory-manager-policy="None" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427495 4791 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427499 4791 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427503 4791 flags.go:64] FLAG: --node-ip="192.168.126.11" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427697 4791 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427726 4791 flags.go:64] FLAG: --node-status-max-images="50" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427730 4791 flags.go:64] FLAG: --node-status-update-frequency="10s" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427735 4791 flags.go:64] FLAG: --oom-score-adj="-999" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427740 4791 flags.go:64] FLAG: --pod-cidr="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427744 4791 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427750 4791 flags.go:64] FLAG: --pod-manifest-path="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427754 4791 flags.go:64] FLAG: --pod-max-pids="-1" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427758 4791 flags.go:64] FLAG: --pods-per-core="0" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427762 4791 flags.go:64] FLAG: --port="10250" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427767 4791 flags.go:64] FLAG: --protect-kernel-defaults="false" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427771 4791 flags.go:64] FLAG: --provider-id="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427775 4791 flags.go:64] FLAG: --qos-reserved="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427779 4791 flags.go:64] FLAG: --read-only-port="10255" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427784 4791 flags.go:64] FLAG: --register-node="true" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427788 4791 flags.go:64] FLAG: --register-schedulable="true" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427792 4791 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427805 4791 flags.go:64] FLAG: --registry-burst="10" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427809 4791 flags.go:64] FLAG: --registry-qps="5" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427814 4791 flags.go:64] 
FLAG: --reserved-cpus="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427818 4791 flags.go:64] FLAG: --reserved-memory="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427823 4791 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427827 4791 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427831 4791 flags.go:64] FLAG: --rotate-certificates="false" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427835 4791 flags.go:64] FLAG: --rotate-server-certificates="false" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427839 4791 flags.go:64] FLAG: --runonce="false" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427843 4791 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427847 4791 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427853 4791 flags.go:64] FLAG: --seccomp-default="false" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427858 4791 flags.go:64] FLAG: --serialize-image-pulls="true" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427862 4791 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427867 4791 flags.go:64] FLAG: --storage-driver-db="cadvisor" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427871 4791 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427875 4791 flags.go:64] FLAG: --storage-driver-password="root" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427879 4791 flags.go:64] FLAG: --storage-driver-secure="false" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427884 4791 flags.go:64] FLAG: --storage-driver-table="stats" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427889 4791 flags.go:64] FLAG: --storage-driver-user="root" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427894 4791 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427898 4791 flags.go:64] FLAG: --sync-frequency="1m0s" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427904 4791 flags.go:64] FLAG: --system-cgroups="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427909 4791 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427916 4791 flags.go:64] FLAG: --system-reserved-cgroup="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427922 4791 flags.go:64] FLAG: --tls-cert-file="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427926 4791 flags.go:64] FLAG: --tls-cipher-suites="[]" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427933 4791 flags.go:64] FLAG: --tls-min-version="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427937 4791 flags.go:64] FLAG: --tls-private-key-file="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427941 4791 flags.go:64] FLAG: --topology-manager-policy="none" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427945 4791 flags.go:64] FLAG: --topology-manager-policy-options="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427950 4791 flags.go:64] FLAG: --topology-manager-scope="container" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427954 4791 flags.go:64] 
FLAG: --v="2" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427959 4791 flags.go:64] FLAG: --version="false" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427965 4791 flags.go:64] FLAG: --vmodule="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427970 4791 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.427974 4791 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428090 4791 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428095 4791 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428099 4791 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428104 4791 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428107 4791 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428112 4791 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428115 4791 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428120 4791 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428124 4791 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428128 4791 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428132 4791 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428136 4791 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428139 4791 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428144 4791 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428148 4791 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428153 4791 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428158 4791 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428162 4791 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428166 4791 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428170 4791 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428174 4791 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428179 4791 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. 
It will be removed in a future release. Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428183 4791 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428187 4791 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428190 4791 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428194 4791 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428197 4791 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428201 4791 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428204 4791 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428208 4791 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428212 4791 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428215 4791 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428219 4791 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428222 4791 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428226 4791 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428229 4791 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428233 4791 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428237 4791 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428240 4791 feature_gate.go:330] unrecognized feature gate: Example Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428244 4791 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428248 4791 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428251 4791 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428255 4791 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428258 4791 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428262 4791 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428265 4791 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428269 4791 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428275 4791 feature_gate.go:330] 
unrecognized feature gate: MultiArchInstallAWS Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428279 4791 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428282 4791 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428286 4791 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428289 4791 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428293 4791 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428296 4791 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428299 4791 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428303 4791 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428306 4791 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428310 4791 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428313 4791 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428317 4791 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428320 4791 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428324 4791 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428328 4791 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428333 4791 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428337 4791 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428341 4791 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428346 4791 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428350 4791 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428354 4791 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428357 4791 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.428361 4791 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.428369 4791 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.439295 4791 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.439339 4791 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439463 4791 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439475 4791 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439483 4791 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439489 4791 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439495 4791 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439501 4791 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439507 4791 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439513 4791 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439518 4791 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439523 4791 feature_gate.go:330] unrecognized feature gate: Example Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439528 4791 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439533 4791 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439539 4791 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439544 4791 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439549 4791 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439554 4791 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 08 
21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439561 4791 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439569 4791 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439577 4791 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439584 4791 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439591 4791 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439597 4791 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439602 4791 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439608 4791 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439614 4791 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439620 4791 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439626 4791 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439632 4791 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439638 4791 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439644 4791 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439650 4791 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439656 4791 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439662 4791 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439668 4791 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439675 4791 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439680 4791 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439686 4791 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439692 4791 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439697 4791 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439703 4791 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439734 4791 feature_gate.go:353] Setting GA feature gate 
CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439743 4791 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439750 4791 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439757 4791 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439765 4791 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439772 4791 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439779 4791 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439785 4791 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439792 4791 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439797 4791 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439803 4791 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439808 4791 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439813 4791 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439820 4791 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439826 4791 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439832 4791 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439837 4791 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439842 4791 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439847 4791 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439852 4791 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439858 4791 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439863 4791 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439868 4791 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439877 4791 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439882 4791 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439888 4791 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439893 4791 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439898 4791 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439904 4791 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439909 4791 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.439914 4791 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.439924 4791 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440082 4791 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440091 4791 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440097 4791 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440104 4791 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440109 4791 feature_gate.go:330] unrecognized feature 
gate: PrivateHostedZoneAWS Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440117 4791 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440124 4791 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440131 4791 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440137 4791 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440142 4791 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440148 4791 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440153 4791 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440158 4791 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440164 4791 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440169 4791 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440175 4791 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440180 4791 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440185 4791 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440191 4791 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440196 4791 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440201 4791 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440207 4791 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440212 4791 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440217 4791 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440226 4791 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440232 4791 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440239 4791 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440244 4791 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440250 4791 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440256 4791 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 08 21:18:43 crc 
kubenswrapper[4791]: W1208 21:18:43.440262 4791 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440269 4791 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440275 4791 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440280 4791 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440287 4791 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440293 4791 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440298 4791 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440304 4791 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440310 4791 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440316 4791 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440321 4791 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440326 4791 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440332 4791 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440337 4791 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440342 4791 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440348 4791 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440353 4791 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440360 4791 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440367 4791 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440372 4791 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440378 4791 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440383 4791 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440388 4791 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440394 4791 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440400 4791 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440407 4791 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440415 4791 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440421 4791 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440427 4791 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440433 4791 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440439 4791 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440444 4791 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440450 4791 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440456 4791 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440463 4791 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440470 4791 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440477 4791 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440483 4791 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440488 4791 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440494 4791 feature_gate.go:330] unrecognized feature gate: Example Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.440499 4791 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.440509 4791 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.440994 4791 server.go:940] "Client rotation is on, will bootstrap in background" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.444872 4791 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.444986 4791 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.446870 4791 server.go:997] "Starting client certificate rotation" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.446894 4791 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.446964 4791 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-11-19 18:19:36.922930774 +0000 UTC Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.447745 4791 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.454575 4791 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 08 21:18:43 crc kubenswrapper[4791]: E1208 21:18:43.456561 4791 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.173:6443: connect: connection refused" logger="UnhandledError" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.457128 4791 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.467744 4791 log.go:25] "Validated CRI v1 runtime API" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.492651 4791 log.go:25] "Validated CRI v1 image API" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.495340 4791 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.498955 4791 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-08-21-14-10-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.499032 4791 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}] Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.524003 4791 manager.go:217] Machine: {Timestamp:2025-12-08 21:18:43.521569689 +0000 UTC m=+0.220328104 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654128640 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:b32d8891-0397-496e-a082-5c392c97eb30 BootID:1d08c457-44d8-4000-aa7e-f79b560f907e Filesystems:[{Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 
Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:a4:22:86 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:a4:22:86 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:6e:1d:29 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:d5:96:89 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:c4:71:97 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:19:90:f1 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:be:a6:03:de:3c:b4 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:f6:e7:a3:a7:48:a6 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654128640 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] 
Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.524934 4791 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.525264 4791 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.526802 4791 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.527394 4791 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.527450 4791 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.528261 4791 topology_manager.go:138] "Creating topology manager with none policy" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.528277 4791 
container_manager_linux.go:303] "Creating device plugin manager"
Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.528563 4791 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.528617 4791 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.529027 4791 state_mem.go:36] "Initialized new in-memory state store"
Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.529305 4791 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.530115 4791 kubelet.go:418] "Attempting to sync node with API server"
Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.530146 4791 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.530181 4791 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.530203 4791 kubelet.go:324] "Adding apiserver pod source"
Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.530218 4791 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.532815 4791 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.173:6443: connect: connection refused
Dec 08 21:18:43 crc kubenswrapper[4791]: E1208 21:18:43.532892 4791 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.173:6443: connect: connection refused" logger="UnhandledError"
Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.532822 4791 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.173:6443: connect: connection refused
Dec 08 21:18:43 crc kubenswrapper[4791]: E1208 21:18:43.532948 4791 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.173:6443: connect: connection refused" logger="UnhandledError"
Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.533142 4791 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.533673 4791 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.534914 4791 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.535779 4791 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.535819 4791 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.535837 4791 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.535851 4791 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.535874 4791 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.535888 4791 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.535902 4791 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.535924 4791 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.535942 4791 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.535956 4791 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.535975 4791 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.535993 4791 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.536526 4791 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.537298 4791 server.go:1280] "Started kubelet"
Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.537951 4791 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.173:6443: connect: connection refused
Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.539272 4791 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.539346 4791 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-15 07:25:18.430637161 +0000 UTC
Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.539627 4791 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.539791 4791 volume_manager.go:287] "The desired_state_of_world populator starts"
Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.539805 4791 volume_manager.go:289] "Starting Kubelet Volume Manager"
Dec 08 21:18:43 crc kubenswrapper[4791]: E1208 21:18:43.539868 4791 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 08 21:18:43 crc systemd[1]: Started Kubernetes Kubelet.
Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.540486 4791 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.540291 4791 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Dec 08 21:18:43 crc kubenswrapper[4791]: E1208 21:18:43.542544 4791 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.173:6443: connect: connection refused" interval="200ms" Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.542597 4791 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.173:6443: connect: connection refused Dec 08 21:18:43 crc kubenswrapper[4791]: E1208 21:18:43.542768 4791 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.173:6443: connect: connection refused" logger="UnhandledError" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.542876 4791 factory.go:55] Registering systemd factory Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.543213 4791 factory.go:221] Registration of the systemd container factory successfully Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.541880 4791 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.544533 4791 factory.go:153] Registering CRI-O factory Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.544824 4791 factory.go:221] Registration of the crio container factory successfully Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.545064 4791 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.545213 4791 factory.go:103] Registering Raw factory Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.545357 4791 manager.go:1196] Started watching for new ooms in manager Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.546696 4791 manager.go:319] Starting recovery of all containers Dec 08 21:18:43 crc kubenswrapper[4791]: E1208 21:18:43.544229 4791 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.173:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187f5a2be5c0db9b default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-08 21:18:43.537238939 +0000 UTC m=+0.235997324,LastTimestamp:2025-12-08 21:18:43.537238939 +0000 UTC m=+0.235997324,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 08 21:18:43 crc 
kubenswrapper[4791]: I1208 21:18:43.544927 4791 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.551616 4791 server.go:460] "Adding debug handlers to kubelet server" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.557103 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.557176 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.557190 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.557206 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.557220 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.557232 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.557245 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.557261 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.557278 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.557293 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.557307 
4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.557329 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.557347 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.557363 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.557377 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.557394 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.557409 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.557423 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.557437 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.557450 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.557464 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.557483 4791 reconstruct.go:130] "Volume is marked 
as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.557499 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.557513 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.557526 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.557539 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.557593 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.557634 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.557670 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.557686 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.557699 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.557731 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.557744 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.557758 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.557802 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.557816 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.557829 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.557842 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.557857 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.557870 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.557885 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.557898 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.557913 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.557927 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" 
volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.557946 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.557961 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.560817 4791 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.561277 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.561302 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.561317 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.561330 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.561346 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.561358 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.561379 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.561396 4791 reconstruct.go:130] "Volume is marked as 
uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.561412 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.561427 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.561443 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.561463 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.561477 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.561490 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.561504 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.561518 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.561533 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.561554 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.561569 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.561582 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.561671 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.561686 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.561700 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.561739 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.561753 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.561766 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.561780 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.561793 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.561805 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.561819 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.561832 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.561846 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.561863 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.561876 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.561891 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.561905 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.561918 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.561931 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.561944 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.561957 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.561971 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" 
volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.561983 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.561996 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562010 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562022 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562035 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562049 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562080 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562097 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562115 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562140 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562166 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" 
volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562192 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562209 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562223 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562240 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562257 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562276 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562302 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562319 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562334 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562349 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562363 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" 
volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562378 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562392 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562409 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562422 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562436 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562450 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562462 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562474 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562518 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562532 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562544 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562558 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562571 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562583 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562596 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562610 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562623 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562638 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562651 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562670 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562683 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562782 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562798 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562811 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562848 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562866 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562884 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562902 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562918 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562934 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562952 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562968 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.562986 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" 
volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.563066 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.563083 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.563098 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.563114 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.563130 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.563146 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.563164 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.563212 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.563246 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.563261 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.563276 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.563293 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.563311 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.563327 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.563344 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.563360 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.563495 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.563515 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.563555 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.563589 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.563606 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.563625 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" 
volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.563642 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.563684 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.563741 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.563760 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.563872 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.563890 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.563906 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.563923 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.563939 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.563957 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.564014 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" 
volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.564052 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.564102 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.564120 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.564136 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.564152 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.564171 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.564217 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.564264 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.564283 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.564300 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.564317 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" 
volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.564339 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.564356 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.564372 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.564396 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.564461 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.564508 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.564547 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.564569 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.564586 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.564609 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.564626 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" 
volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.564995 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.565035 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.565055 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.565073 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.565089 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.565105 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.565121 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.565186 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.565247 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.565290 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.565326 4791 reconstruct.go:97] "Volume reconstruction finished" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.565344 4791 reconciler.go:26] "Reconciler: 
start to sync state" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.573586 4791 manager.go:324] Recovery completed Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.585056 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.587935 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.588040 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.588061 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.589900 4791 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.589993 4791 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.590059 4791 state_mem.go:36] "Initialized new in-memory state store" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.594479 4791 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.596440 4791 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv6" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.596493 4791 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.596524 4791 kubelet.go:2335] "Starting kubelet main sync loop" Dec 08 21:18:43 crc kubenswrapper[4791]: E1208 21:18:43.596596 4791 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 08 21:18:43 crc kubenswrapper[4791]: E1208 21:18:43.640762 4791 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 08 21:18:43 crc kubenswrapper[4791]: W1208 21:18:43.648169 4791 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.173:6443: connect: connection refused Dec 08 21:18:43 crc kubenswrapper[4791]: E1208 21:18:43.648339 4791 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.173:6443: connect: connection refused" logger="UnhandledError" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.661253 4791 policy_none.go:49] "None policy: Start" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.662110 4791 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.662205 4791 state_mem.go:35] "Initializing new in-memory state store" Dec 08 21:18:43 crc kubenswrapper[4791]: E1208 21:18:43.696703 4791 kubelet.go:2359] "Skipping pod synchronization" err="container runtime status check may not have completed yet" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.723230 4791 manager.go:334] "Starting Device Plugin manager" Dec 08 21:18:43 crc kubenswrapper[4791]: 
I1208 21:18:43.723443 4791 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.723464 4791 server.go:79] "Starting device plugin registration server" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.724060 4791 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.724092 4791 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.724437 4791 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.724513 4791 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.724521 4791 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 08 21:18:43 crc kubenswrapper[4791]: E1208 21:18:43.732603 4791 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 08 21:18:43 crc kubenswrapper[4791]: E1208 21:18:43.744148 4791 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.173:6443: connect: connection refused" interval="400ms" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.824944 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.826169 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.826226 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.826237 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.826257 4791 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 08 21:18:43 crc kubenswrapper[4791]: E1208 21:18:43.826837 4791 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.173:6443: connect: connection refused" node="crc" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.897320 4791 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.897554 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.899355 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.899400 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:43 crc kubenswrapper[4791]: 
I1208 21:18:43.899412 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.899568 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.899786 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.899848 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.900645 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.900680 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.900691 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.900800 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.900821 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.900831 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.900861 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.901118 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.901161 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.902087 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.902175 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.902223 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.903996 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.904018 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.904028 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.904128 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.904366 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.904397 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.904942 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.904973 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.905056 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.906383 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.906656 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.906809 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.907513 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.907555 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.907568 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.907744 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.907778 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.907791 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.907932 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.907967 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.908729 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.908760 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.908771 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.908774 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.908846 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.908867 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.969200 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.969235 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.969256 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.969271 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.969289 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.969304 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " 
pod="openshift-etcd/etcd-crc" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.969319 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.969338 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.969355 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.969368 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.969387 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.969400 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.969415 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.969429 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:18:43 crc kubenswrapper[4791]: I1208 21:18:43.969444 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 
21:18:44.027105 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.028804 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.028857 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.028875 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.028909 4791 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 08 21:18:44 crc kubenswrapper[4791]: E1208 21:18:44.029599 4791 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.173:6443: connect: connection refused" node="crc" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.070687 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.070793 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.070838 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.070876 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.070907 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.070915 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.070938 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.070961 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.070975 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.070912 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.070996 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.071057 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.070965 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.071108 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.071094 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.071081 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.071165 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: 
\"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.071178 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.071254 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.071287 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.071289 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.071336 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.071331 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.071446 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.071475 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.071491 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.071511 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" 
(UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.071537 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.071532 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.071593 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 08 21:18:44 crc kubenswrapper[4791]: E1208 21:18:44.145833 4791 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.173:6443: connect: connection refused" interval="800ms" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.224564 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.232151 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.256090 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 08 21:18:44 crc kubenswrapper[4791]: W1208 21:18:44.256284 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-59045fb6c735670c82c418e1836c301c0ae2c77bbf536f160df732bbc668ea67 WatchSource:0}: Error finding container 59045fb6c735670c82c418e1836c301c0ae2c77bbf536f160df732bbc668ea67: Status 404 returned error can't find the container with id 59045fb6c735670c82c418e1836c301c0ae2c77bbf536f160df732bbc668ea67 Dec 08 21:18:44 crc kubenswrapper[4791]: W1208 21:18:44.260278 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-c1d4c7e08fe1324723ed443375cae9c2e04adfc4565bfb1878de05ec40c1e3c4 WatchSource:0}: Error finding container c1d4c7e08fe1324723ed443375cae9c2e04adfc4565bfb1878de05ec40c1e3c4: Status 404 returned error can't find the container with id c1d4c7e08fe1324723ed443375cae9c2e04adfc4565bfb1878de05ec40c1e3c4 Dec 08 21:18:44 crc kubenswrapper[4791]: W1208 21:18:44.266632 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-801e2a22ad502b78c99987cbe9d24e0165afb4450f00de424a464044c1c34107 WatchSource:0}: Error finding container 801e2a22ad502b78c99987cbe9d24e0165afb4450f00de424a464044c1c34107: Status 404 returned error can't find the container with id 801e2a22ad502b78c99987cbe9d24e0165afb4450f00de424a464044c1c34107 Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.270658 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.274157 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 21:18:44 crc kubenswrapper[4791]: W1208 21:18:44.292088 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-9eae259e2cab4246a60326c2d4a5ebeb6dae4d05987b7328a5daec9ae23e9350 WatchSource:0}: Error finding container 9eae259e2cab4246a60326c2d4a5ebeb6dae4d05987b7328a5daec9ae23e9350: Status 404 returned error can't find the container with id 9eae259e2cab4246a60326c2d4a5ebeb6dae4d05987b7328a5daec9ae23e9350 Dec 08 21:18:44 crc kubenswrapper[4791]: W1208 21:18:44.299396 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-aa27e8a58858c73faede993096425fc3fa2caf5fd50b214abfc1b1908d5aea89 WatchSource:0}: Error finding container aa27e8a58858c73faede993096425fc3fa2caf5fd50b214abfc1b1908d5aea89: Status 404 returned error can't find the container with id aa27e8a58858c73faede993096425fc3fa2caf5fd50b214abfc1b1908d5aea89 Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.430613 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.432790 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.432881 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.432903 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.432949 4791 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 08 21:18:44 crc kubenswrapper[4791]: E1208 21:18:44.433730 4791 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.173:6443: connect: connection refused" node="crc" Dec 08 21:18:44 crc kubenswrapper[4791]: W1208 21:18:44.465389 4791 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.173:6443: connect: connection refused Dec 08 21:18:44 crc kubenswrapper[4791]: E1208 21:18:44.465471 4791 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.173:6443: connect: connection refused" logger="UnhandledError" Dec 08 21:18:44 crc kubenswrapper[4791]: W1208 21:18:44.472978 4791 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.173:6443: connect: connection refused Dec 08 21:18:44 crc kubenswrapper[4791]: E1208 21:18:44.473021 4791 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get 
\"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.173:6443: connect: connection refused" logger="UnhandledError" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.539487 4791 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 11:23:52.366524947 +0000 UTC Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.540112 4791 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.173:6443: connect: connection refused Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.653362 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"b1bfa0139619e85c94a473c25d6f7163c2bd700b172b828d7decfc3a14baf4b4"} Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.653481 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"aa27e8a58858c73faede993096425fc3fa2caf5fd50b214abfc1b1908d5aea89"} Dec 08 21:18:44 crc kubenswrapper[4791]: W1208 21:18:44.653753 4791 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.173:6443: connect: connection refused Dec 08 21:18:44 crc kubenswrapper[4791]: E1208 21:18:44.653844 4791 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.173:6443: connect: connection refused" logger="UnhandledError" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.654736 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18"} Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.654775 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"9eae259e2cab4246a60326c2d4a5ebeb6dae4d05987b7328a5daec9ae23e9350"} Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.654872 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.655566 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.655606 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.655619 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.655948 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"49dee369ad7ca59992d8cd89215b96fdadf4f391d3053d9ad8c81c3453286e77"} Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.656002 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"801e2a22ad502b78c99987cbe9d24e0165afb4450f00de424a464044c1c34107"} Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.656070 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.656609 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.656640 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.656651 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.657787 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"2d57b4b60d9c35eb18a5976c42d5f38e0c8dfdb9c04f4e27585f00e57c3f27f4"} Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.657817 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"c1d4c7e08fe1324723ed443375cae9c2e04adfc4565bfb1878de05ec40c1e3c4"} Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.657938 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.658662 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.658704 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.658725 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.659885 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"009a64b9b1a3d48a736ee2aa8c80a719dfa704e61f6a8a2f23dbb568bdd521c3"} Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.659916 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"59045fb6c735670c82c418e1836c301c0ae2c77bbf536f160df732bbc668ea67"} Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.659972 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.660491 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.660515 
4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:44 crc kubenswrapper[4791]: I1208 21:18:44.660526 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:44 crc kubenswrapper[4791]: W1208 21:18:44.756605 4791 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.173:6443: connect: connection refused Dec 08 21:18:44 crc kubenswrapper[4791]: E1208 21:18:44.756693 4791 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.173:6443: connect: connection refused" logger="UnhandledError" Dec 08 21:18:44 crc kubenswrapper[4791]: E1208 21:18:44.946784 4791 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.173:6443: connect: connection refused" interval="1.6s" Dec 08 21:18:45 crc kubenswrapper[4791]: I1208 21:18:45.376906 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:45 crc kubenswrapper[4791]: I1208 21:18:45.378780 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:45 crc kubenswrapper[4791]: I1208 21:18:45.378809 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:45 crc kubenswrapper[4791]: I1208 21:18:45.378820 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:45 crc kubenswrapper[4791]: I1208 21:18:45.378844 4791 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 08 21:18:45 crc kubenswrapper[4791]: E1208 21:18:45.379599 4791 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.173:6443: connect: connection refused" node="crc" Dec 08 21:18:45 crc kubenswrapper[4791]: I1208 21:18:45.539604 4791 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 21:38:53.246873218 +0000 UTC Dec 08 21:18:45 crc kubenswrapper[4791]: I1208 21:18:45.539657 4791 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 312h20m7.70722127s for next certificate rotation Dec 08 21:18:45 crc kubenswrapper[4791]: I1208 21:18:45.540060 4791 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.173:6443: connect: connection refused Dec 08 21:18:45 crc kubenswrapper[4791]: I1208 21:18:45.548487 4791 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 08 21:18:45 crc kubenswrapper[4791]: E1208 21:18:45.549368 4791 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate 
from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.173:6443: connect: connection refused" logger="UnhandledError" Dec 08 21:18:45 crc kubenswrapper[4791]: I1208 21:18:45.665122 4791 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18" exitCode=0 Dec 08 21:18:45 crc kubenswrapper[4791]: I1208 21:18:45.665193 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18"} Dec 08 21:18:45 crc kubenswrapper[4791]: I1208 21:18:45.665325 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:45 crc kubenswrapper[4791]: I1208 21:18:45.666360 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:45 crc kubenswrapper[4791]: I1208 21:18:45.666784 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:45 crc kubenswrapper[4791]: I1208 21:18:45.666792 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:45 crc kubenswrapper[4791]: I1208 21:18:45.666937 4791 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="49dee369ad7ca59992d8cd89215b96fdadf4f391d3053d9ad8c81c3453286e77" exitCode=0 Dec 08 21:18:45 crc kubenswrapper[4791]: I1208 21:18:45.667014 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"49dee369ad7ca59992d8cd89215b96fdadf4f391d3053d9ad8c81c3453286e77"} Dec 08 21:18:45 crc kubenswrapper[4791]: I1208 21:18:45.667049 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:45 crc kubenswrapper[4791]: I1208 21:18:45.668618 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:45 crc kubenswrapper[4791]: I1208 21:18:45.668657 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:45 crc kubenswrapper[4791]: I1208 21:18:45.668674 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:45 crc kubenswrapper[4791]: I1208 21:18:45.670089 4791 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="2d57b4b60d9c35eb18a5976c42d5f38e0c8dfdb9c04f4e27585f00e57c3f27f4" exitCode=0 Dec 08 21:18:45 crc kubenswrapper[4791]: I1208 21:18:45.670193 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"2d57b4b60d9c35eb18a5976c42d5f38e0c8dfdb9c04f4e27585f00e57c3f27f4"} Dec 08 21:18:45 crc kubenswrapper[4791]: I1208 21:18:45.670347 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:45 crc kubenswrapper[4791]: I1208 21:18:45.670679 4791 kubelet_node_status.go:401] 
"Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:45 crc kubenswrapper[4791]: I1208 21:18:45.673097 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:45 crc kubenswrapper[4791]: I1208 21:18:45.673119 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:45 crc kubenswrapper[4791]: I1208 21:18:45.673128 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:45 crc kubenswrapper[4791]: I1208 21:18:45.673680 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:45 crc kubenswrapper[4791]: I1208 21:18:45.673699 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:45 crc kubenswrapper[4791]: I1208 21:18:45.673728 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:45 crc kubenswrapper[4791]: I1208 21:18:45.675503 4791 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="009a64b9b1a3d48a736ee2aa8c80a719dfa704e61f6a8a2f23dbb568bdd521c3" exitCode=0 Dec 08 21:18:45 crc kubenswrapper[4791]: I1208 21:18:45.675568 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"009a64b9b1a3d48a736ee2aa8c80a719dfa704e61f6a8a2f23dbb568bdd521c3"} Dec 08 21:18:45 crc kubenswrapper[4791]: I1208 21:18:45.675595 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"3082317e8e22445dd4f4e9a86e2d8fe7c764232c5972a4946dcb8c60c13256ec"} Dec 08 21:18:45 crc kubenswrapper[4791]: I1208 21:18:45.675649 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:45 crc kubenswrapper[4791]: I1208 21:18:45.676541 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:45 crc kubenswrapper[4791]: I1208 21:18:45.676581 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:45 crc kubenswrapper[4791]: I1208 21:18:45.676594 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:45 crc kubenswrapper[4791]: I1208 21:18:45.679268 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"f95e8c46d330a46bccff4299a2cc5d753d2371a1889cddbcdc70f7da571915a6"} Dec 08 21:18:45 crc kubenswrapper[4791]: I1208 21:18:45.679297 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"52d9c79fb2719ba7607d76daa944d1b52fe2bf20792f2dc24df762372b7c25ea"} Dec 08 21:18:45 crc kubenswrapper[4791]: I1208 21:18:45.679310 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c27269d9eb25f7d681a2c653a09d62264d8fc2cd8500eb2c2037d1ef512bb852"} Dec 08 21:18:45 crc kubenswrapper[4791]: I1208 21:18:45.679331 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:45 crc kubenswrapper[4791]: I1208 21:18:45.679956 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:45 crc kubenswrapper[4791]: I1208 21:18:45.679993 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:45 crc kubenswrapper[4791]: I1208 21:18:45.680003 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:46 crc kubenswrapper[4791]: I1208 21:18:46.686460 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"c153324bc4f35dd06b193de5ef89462ab658a50a1384b9eb6f5b3b5ccf0b2018"} Dec 08 21:18:46 crc kubenswrapper[4791]: I1208 21:18:46.686546 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"20b454a2e59b4533af556e4f5175b49175c7b4656861477cec02573e09f27b28"} Dec 08 21:18:46 crc kubenswrapper[4791]: I1208 21:18:46.686577 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"20ffa577a6292eb443cd5673c9d3a3833627cd38e63b153db5627047dc57b8f1"} Dec 08 21:18:46 crc kubenswrapper[4791]: I1208 21:18:46.686604 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"4787a6a226550bb0d96b1b8af8059a6d349d659bad8718ef1df246a66b1e0ac2"} Dec 08 21:18:46 crc kubenswrapper[4791]: I1208 21:18:46.689428 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"af10caaed4e3a89c808f6079aa52c90d7b5388abe767a68302416cfd21a0fda9"} Dec 08 21:18:46 crc kubenswrapper[4791]: I1208 21:18:46.689505 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"97ccc3a04813d2f4455eac810eaa1f833fe91300943d8b2ac47dcc1892a46073"} Dec 08 21:18:46 crc kubenswrapper[4791]: I1208 21:18:46.689520 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:46 crc kubenswrapper[4791]: I1208 21:18:46.689531 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"37d42a95e0ac9560b58141190f8e431589faf9c15a660a4091fdcb3750ebfce1"} Dec 08 21:18:46 crc kubenswrapper[4791]: I1208 21:18:46.690894 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:46 crc kubenswrapper[4791]: I1208 21:18:46.690956 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 21:18:46 crc kubenswrapper[4791]: I1208 21:18:46.690986 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:46 crc kubenswrapper[4791]: I1208 21:18:46.691339 4791 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="9ee716248fcf96836fa6fca32e47c04270b384be440d725e5083bf72fd897122" exitCode=0 Dec 08 21:18:46 crc kubenswrapper[4791]: I1208 21:18:46.691389 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"9ee716248fcf96836fa6fca32e47c04270b384be440d725e5083bf72fd897122"} Dec 08 21:18:46 crc kubenswrapper[4791]: I1208 21:18:46.691456 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:46 crc kubenswrapper[4791]: I1208 21:18:46.691517 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:46 crc kubenswrapper[4791]: I1208 21:18:46.692532 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:46 crc kubenswrapper[4791]: I1208 21:18:46.692601 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:46 crc kubenswrapper[4791]: I1208 21:18:46.692623 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:46 crc kubenswrapper[4791]: I1208 21:18:46.692819 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:46 crc kubenswrapper[4791]: I1208 21:18:46.692870 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:46 crc kubenswrapper[4791]: I1208 21:18:46.692935 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:46 crc kubenswrapper[4791]: I1208 21:18:46.980201 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:46 crc kubenswrapper[4791]: I1208 21:18:46.982217 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:46 crc kubenswrapper[4791]: I1208 21:18:46.982372 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:46 crc kubenswrapper[4791]: I1208 21:18:46.982508 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:46 crc kubenswrapper[4791]: I1208 21:18:46.982631 4791 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 08 21:18:47 crc kubenswrapper[4791]: I1208 21:18:47.701291 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"ad31c2445a3c19bedfc0d319262fdcc11fe2ba317881450f100cd4e67e2355f2"} Dec 08 21:18:47 crc kubenswrapper[4791]: I1208 21:18:47.701463 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:47 crc kubenswrapper[4791]: I1208 21:18:47.703983 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 08 21:18:47 crc kubenswrapper[4791]: I1208 21:18:47.704232 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:47 crc kubenswrapper[4791]: I1208 21:18:47.704386 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:47 crc kubenswrapper[4791]: I1208 21:18:47.705792 4791 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="a8047c3513de0746992a77b9867a3a8709bfe81592ef9ca2483a753a0862fefe" exitCode=0 Dec 08 21:18:47 crc kubenswrapper[4791]: I1208 21:18:47.705896 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:47 crc kubenswrapper[4791]: I1208 21:18:47.706048 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:47 crc kubenswrapper[4791]: I1208 21:18:47.706206 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"a8047c3513de0746992a77b9867a3a8709bfe81592ef9ca2483a753a0862fefe"} Dec 08 21:18:47 crc kubenswrapper[4791]: I1208 21:18:47.706460 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 08 21:18:47 crc kubenswrapper[4791]: I1208 21:18:47.706933 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:47 crc kubenswrapper[4791]: I1208 21:18:47.706985 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:47 crc kubenswrapper[4791]: I1208 21:18:47.707009 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:47 crc kubenswrapper[4791]: I1208 21:18:47.707828 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:47 crc kubenswrapper[4791]: I1208 21:18:47.708872 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:47 crc kubenswrapper[4791]: I1208 21:18:47.708914 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:48 crc kubenswrapper[4791]: I1208 21:18:48.710755 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"870672685b3bbcda0af97fd9af6d3f40fb9c9d57432f0736e64ea96a21ff1b34"} Dec 08 21:18:48 crc kubenswrapper[4791]: I1208 21:18:48.710803 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"c57e43dc171b23fa1abb6f3baa41f8dfee855b0e85afc8a233c7d9ead4706301"} Dec 08 21:18:48 crc kubenswrapper[4791]: I1208 21:18:48.710818 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"9a560d56b37dcaf5f26a1c58f78c02dd0f5fd9b222e696c15474acd25eba3ab2"} Dec 08 21:18:48 crc kubenswrapper[4791]: I1208 21:18:48.710830 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" 
event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"ac2c3ef5a41d743dba41d84bbe4e89b64d8702b2d4565982be17fae194312abf"} Dec 08 21:18:48 crc kubenswrapper[4791]: I1208 21:18:48.710839 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:48 crc kubenswrapper[4791]: I1208 21:18:48.710908 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:48 crc kubenswrapper[4791]: I1208 21:18:48.711339 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:18:48 crc kubenswrapper[4791]: I1208 21:18:48.711663 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:48 crc kubenswrapper[4791]: I1208 21:18:48.711686 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:48 crc kubenswrapper[4791]: I1208 21:18:48.711669 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:48 crc kubenswrapper[4791]: I1208 21:18:48.711695 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:48 crc kubenswrapper[4791]: I1208 21:18:48.711733 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:48 crc kubenswrapper[4791]: I1208 21:18:48.711750 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:48 crc kubenswrapper[4791]: I1208 21:18:48.714459 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:18:49 crc kubenswrapper[4791]: I1208 21:18:49.578999 4791 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 08 21:18:49 crc kubenswrapper[4791]: I1208 21:18:49.722528 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:49 crc kubenswrapper[4791]: I1208 21:18:49.722558 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"468f39d9ddd6d74f616a2559f2204a65336bbb713c83c64ff511fd4126d30bd1"} Dec 08 21:18:49 crc kubenswrapper[4791]: I1208 21:18:49.722768 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:49 crc kubenswrapper[4791]: I1208 21:18:49.724042 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:49 crc kubenswrapper[4791]: I1208 21:18:49.724128 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:49 crc kubenswrapper[4791]: I1208 21:18:49.724158 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:49 crc kubenswrapper[4791]: I1208 21:18:49.725159 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:49 crc kubenswrapper[4791]: I1208 21:18:49.725223 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:49 crc 
kubenswrapper[4791]: I1208 21:18:49.725243 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:49 crc kubenswrapper[4791]: I1208 21:18:49.866089 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 21:18:49 crc kubenswrapper[4791]: I1208 21:18:49.866402 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:49 crc kubenswrapper[4791]: I1208 21:18:49.868262 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:49 crc kubenswrapper[4791]: I1208 21:18:49.868334 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:49 crc kubenswrapper[4791]: I1208 21:18:49.868362 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:50 crc kubenswrapper[4791]: I1208 21:18:50.473521 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 21:18:50 crc kubenswrapper[4791]: I1208 21:18:50.671024 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 21:18:50 crc kubenswrapper[4791]: I1208 21:18:50.724410 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:50 crc kubenswrapper[4791]: I1208 21:18:50.724415 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:50 crc kubenswrapper[4791]: I1208 21:18:50.724610 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:50 crc kubenswrapper[4791]: I1208 21:18:50.725675 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:50 crc kubenswrapper[4791]: I1208 21:18:50.725720 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:50 crc kubenswrapper[4791]: I1208 21:18:50.725729 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:50 crc kubenswrapper[4791]: I1208 21:18:50.725685 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:50 crc kubenswrapper[4791]: I1208 21:18:50.725758 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:50 crc kubenswrapper[4791]: I1208 21:18:50.725819 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:50 crc kubenswrapper[4791]: I1208 21:18:50.726582 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:50 crc kubenswrapper[4791]: I1208 21:18:50.726611 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:50 crc kubenswrapper[4791]: I1208 21:18:50.726618 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:51 crc kubenswrapper[4791]: I1208 21:18:51.263036 4791 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 21:18:51 crc kubenswrapper[4791]: I1208 21:18:51.275575 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 21:18:51 crc kubenswrapper[4791]: I1208 21:18:51.677535 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:18:51 crc kubenswrapper[4791]: I1208 21:18:51.727986 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:51 crc kubenswrapper[4791]: I1208 21:18:51.728097 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:51 crc kubenswrapper[4791]: I1208 21:18:51.729566 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:51 crc kubenswrapper[4791]: I1208 21:18:51.729610 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:51 crc kubenswrapper[4791]: I1208 21:18:51.729627 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:51 crc kubenswrapper[4791]: I1208 21:18:51.730216 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:51 crc kubenswrapper[4791]: I1208 21:18:51.730306 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:51 crc kubenswrapper[4791]: I1208 21:18:51.730351 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:51 crc kubenswrapper[4791]: I1208 21:18:51.954080 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Dec 08 21:18:51 crc kubenswrapper[4791]: I1208 21:18:51.954375 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:51 crc kubenswrapper[4791]: I1208 21:18:51.955739 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:51 crc kubenswrapper[4791]: I1208 21:18:51.955845 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:51 crc kubenswrapper[4791]: I1208 21:18:51.955933 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:52 crc kubenswrapper[4791]: I1208 21:18:52.729555 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:52 crc kubenswrapper[4791]: I1208 21:18:52.730529 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:52 crc kubenswrapper[4791]: I1208 21:18:52.730584 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:52 crc kubenswrapper[4791]: I1208 21:18:52.730604 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:53 crc kubenswrapper[4791]: I1208 21:18:53.015876 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-etcd/etcd-crc" Dec 08 21:18:53 crc kubenswrapper[4791]: I1208 21:18:53.016140 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:53 crc kubenswrapper[4791]: I1208 21:18:53.017605 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:53 crc kubenswrapper[4791]: I1208 21:18:53.017664 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:53 crc kubenswrapper[4791]: I1208 21:18:53.017683 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:53 crc kubenswrapper[4791]: I1208 21:18:53.672294 4791 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 08 21:18:53 crc kubenswrapper[4791]: I1208 21:18:53.672457 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 08 21:18:53 crc kubenswrapper[4791]: E1208 21:18:53.732765 4791 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 08 21:18:56 crc kubenswrapper[4791]: W1208 21:18:56.319039 4791 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 08 21:18:56 crc kubenswrapper[4791]: I1208 21:18:56.319156 4791 trace.go:236] Trace[525678584]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (08-Dec-2025 21:18:46.317) (total time: 10001ms): Dec 08 21:18:56 crc kubenswrapper[4791]: Trace[525678584]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (21:18:56.319) Dec 08 21:18:56 crc kubenswrapper[4791]: Trace[525678584]: [10.00128935s] [10.00128935s] END Dec 08 21:18:56 crc kubenswrapper[4791]: E1208 21:18:56.319177 4791 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 08 21:18:56 crc kubenswrapper[4791]: I1208 21:18:56.540534 4791 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Dec 08 21:18:56 crc kubenswrapper[4791]: E1208 21:18:56.547900 4791 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": net/http: request canceled while waiting for connection 
(Client.Timeout exceeded while awaiting headers)" interval="3.2s" Dec 08 21:18:56 crc kubenswrapper[4791]: W1208 21:18:56.653984 4791 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 08 21:18:56 crc kubenswrapper[4791]: I1208 21:18:56.654153 4791 trace.go:236] Trace[1014624016]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (08-Dec-2025 21:18:46.652) (total time: 10002ms): Dec 08 21:18:56 crc kubenswrapper[4791]: Trace[1014624016]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (21:18:56.653) Dec 08 21:18:56 crc kubenswrapper[4791]: Trace[1014624016]: [10.002100652s] [10.002100652s] END Dec 08 21:18:56 crc kubenswrapper[4791]: E1208 21:18:56.654188 4791 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 08 21:18:56 crc kubenswrapper[4791]: W1208 21:18:56.671999 4791 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 08 21:18:56 crc kubenswrapper[4791]: I1208 21:18:56.672155 4791 trace.go:236] Trace[1293782757]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (08-Dec-2025 21:18:46.670) (total time: 10001ms): Dec 08 21:18:56 crc kubenswrapper[4791]: Trace[1293782757]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (21:18:56.671) Dec 08 21:18:56 crc kubenswrapper[4791]: Trace[1293782757]: [10.00195667s] [10.00195667s] END Dec 08 21:18:56 crc kubenswrapper[4791]: E1208 21:18:56.672192 4791 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 08 21:18:56 crc kubenswrapper[4791]: E1208 21:18:56.984537 4791 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": net/http: TLS handshake timeout" node="crc" Dec 08 21:18:57 crc kubenswrapper[4791]: W1208 21:18:57.085330 4791 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 08 21:18:57 crc kubenswrapper[4791]: I1208 21:18:57.085451 4791 trace.go:236] Trace[1813208013]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (08-Dec-2025 21:18:47.083) (total time: 10001ms): Dec 08 21:18:57 crc kubenswrapper[4791]: Trace[1813208013]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: 
TLS handshake timeout 10001ms (21:18:57.085) Dec 08 21:18:57 crc kubenswrapper[4791]: Trace[1813208013]: [10.001802283s] [10.001802283s] END Dec 08 21:18:57 crc kubenswrapper[4791]: E1208 21:18:57.085478 4791 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 08 21:18:57 crc kubenswrapper[4791]: I1208 21:18:57.204985 4791 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 08 21:18:57 crc kubenswrapper[4791]: I1208 21:18:57.205046 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 08 21:18:57 crc kubenswrapper[4791]: I1208 21:18:57.214186 4791 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 08 21:18:57 crc kubenswrapper[4791]: I1208 21:18:57.214251 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 08 21:18:59 crc kubenswrapper[4791]: I1208 21:18:59.981773 4791 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 08 21:19:00 crc kubenswrapper[4791]: I1208 21:19:00.185503 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:19:00 crc kubenswrapper[4791]: I1208 21:19:00.186957 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:00 crc kubenswrapper[4791]: I1208 21:19:00.186988 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:00 crc kubenswrapper[4791]: I1208 21:19:00.186998 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:00 crc kubenswrapper[4791]: I1208 21:19:00.187019 4791 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 08 21:19:00 crc kubenswrapper[4791]: E1208 21:19:00.190696 4791 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Dec 08 21:19:00 crc kubenswrapper[4791]: I1208 21:19:00.481067 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 21:19:00 crc 
kubenswrapper[4791]: I1208 21:19:00.481256 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:19:00 crc kubenswrapper[4791]: I1208 21:19:00.482565 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:00 crc kubenswrapper[4791]: I1208 21:19:00.482620 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:00 crc kubenswrapper[4791]: I1208 21:19:00.482637 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:01 crc kubenswrapper[4791]: I1208 21:19:01.485864 4791 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 08 21:19:01 crc kubenswrapper[4791]: I1208 21:19:01.539763 4791 apiserver.go:52] "Watching apiserver" Dec 08 21:19:01 crc kubenswrapper[4791]: I1208 21:19:01.542626 4791 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 08 21:19:01 crc kubenswrapper[4791]: I1208 21:19:01.542853 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf"] Dec 08 21:19:01 crc kubenswrapper[4791]: I1208 21:19:01.543141 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 08 21:19:01 crc kubenswrapper[4791]: I1208 21:19:01.543342 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:01 crc kubenswrapper[4791]: I1208 21:19:01.543363 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:01 crc kubenswrapper[4791]: E1208 21:19:01.543405 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:19:01 crc kubenswrapper[4791]: I1208 21:19:01.543435 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 08 21:19:01 crc kubenswrapper[4791]: E1208 21:19:01.543581 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:19:01 crc kubenswrapper[4791]: I1208 21:19:01.543614 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 08 21:19:01 crc kubenswrapper[4791]: I1208 21:19:01.543622 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:01 crc kubenswrapper[4791]: E1208 21:19:01.543726 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:19:01 crc kubenswrapper[4791]: I1208 21:19:01.544692 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 08 21:19:01 crc kubenswrapper[4791]: I1208 21:19:01.544994 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 08 21:19:01 crc kubenswrapper[4791]: I1208 21:19:01.545103 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 08 21:19:01 crc kubenswrapper[4791]: I1208 21:19:01.545295 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 08 21:19:01 crc kubenswrapper[4791]: I1208 21:19:01.545825 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 08 21:19:01 crc kubenswrapper[4791]: I1208 21:19:01.546350 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 08 21:19:01 crc kubenswrapper[4791]: I1208 21:19:01.546633 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 08 21:19:01 crc kubenswrapper[4791]: I1208 21:19:01.547365 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 08 21:19:01 crc kubenswrapper[4791]: I1208 21:19:01.547567 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 08 21:19:01 crc kubenswrapper[4791]: I1208 21:19:01.555380 4791 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 08 21:19:01 crc kubenswrapper[4791]: I1208 21:19:01.567129 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers 
with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:01 crc kubenswrapper[4791]: I1208 21:19:01.579378 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:01 crc kubenswrapper[4791]: I1208 21:19:01.588464 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:01 crc kubenswrapper[4791]: I1208 21:19:01.596310 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:01 crc kubenswrapper[4791]: I1208 21:19:01.608932 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:01 crc kubenswrapper[4791]: I1208 21:19:01.619263 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:01 crc kubenswrapper[4791]: I1208 21:19:01.630268 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:01 crc kubenswrapper[4791]: I1208 21:19:01.640859 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:01 crc kubenswrapper[4791]: I1208 21:19:01.641077 4791 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 08 21:19:01 crc kubenswrapper[4791]: I1208 21:19:01.681513 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:19:01 crc kubenswrapper[4791]: I1208 21:19:01.685191 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:19:01 crc kubenswrapper[4791]: I1208 21:19:01.692134 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:01 crc kubenswrapper[4791]: I1208 21:19:01.692348 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 08 21:19:01 crc kubenswrapper[4791]: I1208 21:19:01.701418 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:01 crc kubenswrapper[4791]: I1208 21:19:01.710452 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:01 crc kubenswrapper[4791]: I1208 21:19:01.718324 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:01 crc kubenswrapper[4791]: I1208 21:19:01.727770 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:01 crc kubenswrapper[4791]: I1208 21:19:01.735482 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:01 crc kubenswrapper[4791]: I1208 21:19:01.744697 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d7d5b0-bf26-4221-9933-1f2af688750f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4787a6a226550bb0d96b1b8af8059a6d349d659bad8718ef1df246a66b1e0ac2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b454a2e59b4533af556e4f5175b49175c7b4656861477cec02573e09f27b28\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20ffa577a6292eb443cd5673c9d3a3833627cd38e63b153db5627047dc57b8f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad31c2445a3c19bedfc0d319262fdcc11fe2ba317881450f100cd4e67e2355f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resourc
e-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153324bc4f35dd06b193de5ef89462ab658a50a1384b9eb6f5b3b5ccf0b2018\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:01 crc kubenswrapper[4791]: I1208 21:19:01.752163 4791 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 08 21:19:01 crc kubenswrapper[4791]: I1208 21:19:01.754725 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:01 crc kubenswrapper[4791]: E1208 21:19:01.757956 4791 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-apiserver-crc\" already exists" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:19:01 crc kubenswrapper[4791]: I1208 21:19:01.762845 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:01 crc kubenswrapper[4791]: I1208 21:19:01.770643 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:01 crc kubenswrapper[4791]: I1208 21:19:01.780060 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:01 crc kubenswrapper[4791]: I1208 21:19:01.789857 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:01 crc kubenswrapper[4791]: I1208 21:19:01.798377 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.209646 4791 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.222325 4791 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.234316 4791 csr.go:261] certificate signing request csr-9m97p is approved, waiting to be issued Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.241576 4791 csr.go:257] certificate signing request csr-9m97p is issued Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.323014 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.323088 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.323115 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.323140 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.323159 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.323176 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: 
\"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.323197 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.323220 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.323240 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.323260 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.323280 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.323308 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.323329 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.323351 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.323397 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.323419 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: 
\"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.323452 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.323477 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.323500 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.323524 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.323545 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.323567 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.323588 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.323612 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.323636 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.323685 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: 
\"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.323748 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.323756 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.323776 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.323822 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.323863 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.323922 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.323944 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.323968 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.323990 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.324075 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.324077 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.324098 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.324122 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.324174 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.324201 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.324226 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.324249 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.324274 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.324299 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.324321 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.324347 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod 
\"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.324372 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.324393 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.324415 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.324438 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.324460 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.324484 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.324509 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.324534 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.324559 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.324579 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.325189 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.325216 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.325235 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.325254 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.325273 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.325292 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.325343 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.325365 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.325385 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.325406 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.325425 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.325448 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.325466 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.325489 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.325511 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.325533 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.325553 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.325574 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.325627 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.325647 
4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.325665 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.325682 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.325721 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.325742 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.325761 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.325780 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.325797 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.325819 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.325838 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 08 21:19:02 crc 
kubenswrapper[4791]: I1208 21:19:02.325858 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.325891 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.325912 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.325932 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.325985 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.326011 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.326031 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.326050 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.326069 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.326087 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 08 21:19:02 crc 
kubenswrapper[4791]: I1208 21:19:02.326106 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.326125 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.326143 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.326161 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.326181 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.326201 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.326219 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.326240 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.326259 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.324091 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: 
"4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.331406 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.324274 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.324290 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.324486 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.324616 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.324683 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.324740 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.324779 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.324924 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.324933 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.324986 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.325011 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.325094 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.325220 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.325463 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.325517 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.325528 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.325580 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.325681 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.325771 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.325827 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.325880 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.325956 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.325985 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.326045 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.326185 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.326236 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.326279 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.327480 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.327508 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.327539 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.327599 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.327665 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.327678 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.328201 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.328278 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.328902 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.329357 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.329613 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.329491 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.329637 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.329842 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.329979 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.330502 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.330765 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.331692 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.331883 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.331372 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.331467 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.332077 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.332264 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.332286 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.332303 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: E1208 21:19:02.332495 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:02.832335745 +0000 UTC m=+19.531094100 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.332515 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.332599 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.332609 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.332876 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.332933 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.333111 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.333409 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.333495 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.333627 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.333905 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.334099 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.334194 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.334210 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.334757 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.335512 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.335519 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.335782 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.335830 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.336163 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.336278 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.336495 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.336533 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.336745 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.336864 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.337105 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.338106 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.338691 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). 
InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.343066 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.344381 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.344703 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.344706 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.345059 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.345358 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.345462 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.345561 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.345662 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.345789 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.345888 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.345985 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.346081 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.346168 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.346259 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: 
\"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.346354 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.346454 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.346543 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.346648 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.346794 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.346891 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.346992 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.347084 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.347181 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.347276 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.347778 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.347827 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.347849 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.348075 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.348127 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.348159 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.348198 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.348886 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.349002 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.349469 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.349492 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.349512 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.349534 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.349557 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.349579 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.346299 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.346909 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.347506 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.347753 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.347753 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.350110 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.347761 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.347843 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.348217 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.348482 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.348171 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.348787 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.348885 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.349309 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.349321 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.348163 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.350049 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.350209 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.350421 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.350752 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.350594 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.350956 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.351086 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.351189 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.350811 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.355084 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.355128 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.355157 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.351231 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.351686 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.353885 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.354690 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.354870 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.355187 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.355252 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.355288 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.355315 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.355542 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.355583 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.355616 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.355646 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.355673 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.355699 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: 
\"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.355745 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.355775 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.355805 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.355845 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.355874 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.355935 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.355962 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.355988 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.356015 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.356041 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.356066 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.356093 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.356121 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.356147 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.356174 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.356199 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.356224 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.356251 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.356277 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.356305 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" 
(UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.356330 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.356359 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.356386 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.356411 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.356444 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.356487 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.356513 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.357499 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.357550 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.357866 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.357936 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.358045 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.358074 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.358096 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.358123 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.358156 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.358182 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.358206 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.358228 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: 
\"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.358262 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.358286 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.358309 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.358331 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.358354 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.358376 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.358452 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.358493 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.358520 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " 
pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.358549 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.358579 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.358628 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.358659 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.358685 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.358746 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.358780 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.358813 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.358839 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.358864 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.358887 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359041 4791 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359061 4791 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359076 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359090 4791 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359104 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359117 4791 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359133 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359148 4791 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359159 4791 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 
crc kubenswrapper[4791]: I1208 21:19:02.359172 4791 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359185 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359199 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359214 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359227 4791 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359240 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359255 4791 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359267 4791 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359280 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359293 4791 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359307 4791 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359324 4791 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359338 4791 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" 
DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359351 4791 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359363 4791 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359375 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359388 4791 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359400 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359412 4791 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359425 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359438 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359452 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359465 4791 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359479 4791 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359492 4791 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359507 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc 
kubenswrapper[4791]: I1208 21:19:02.359519 4791 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359532 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359545 4791 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359561 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359574 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359586 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359599 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359612 4791 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359625 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359638 4791 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359650 4791 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359663 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359675 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 
21:19:02.359686 4791 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359698 4791 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359730 4791 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359744 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359758 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359771 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359784 4791 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359797 4791 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.361625 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.361951 4791 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.361972 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.361988 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.362005 4791 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: 
I1208 21:19:02.362019 4791 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.362034 4791 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.362049 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.362068 4791 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.362083 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.362099 4791 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.362112 4791 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363136 4791 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363155 4791 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363170 4791 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363185 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363198 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363209 4791 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc 
kubenswrapper[4791]: I1208 21:19:02.363218 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363228 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363238 4791 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363247 4791 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363257 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363272 4791 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363284 4791 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363296 4791 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363308 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363323 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363336 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363348 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363360 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" 
Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363373 4791 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363384 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363399 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363413 4791 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363425 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363438 4791 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363450 4791 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363462 4791 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363476 4791 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363487 4791 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363498 4791 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363514 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363529 4791 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363541 4791 
reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363553 4791 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363566 4791 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363592 4791 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363606 4791 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363620 4791 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363632 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363643 4791 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363656 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363670 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363682 4791 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363692 4791 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363703 4791 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363730 4791 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.373401 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.357996 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.358172 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.358182 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.358401 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.358503 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.358883 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359314 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359424 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.359840 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.360187 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.361104 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.358778 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.361350 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.361851 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.361948 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.362065 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363084 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.363300 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: E1208 21:19:02.372206 4791 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 08 21:19:02 crc kubenswrapper[4791]: E1208 21:19:02.375690 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-08 21:19:02.875665433 +0000 UTC m=+19.574423778 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.376407 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.376590 4791 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. 
Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.376616 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.376692 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.377594 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.377661 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.378606 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.379082 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.382057 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-np52c"] Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.382383 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.382555 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.383131 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.383211 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-np52c" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.383155 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.375336 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.375361 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.372607 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.383507 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.383763 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.384762 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: E1208 21:19:02.384948 4791 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 08 21:19:02 crc kubenswrapper[4791]: E1208 21:19:02.385033 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-08 21:19:02.885007692 +0000 UTC m=+19.583766227 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.386079 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.386456 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.387359 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.388947 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.391109 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.391408 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.391786 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.392506 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.392969 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.393428 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.397994 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.402077 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.403639 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.403620 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.404098 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.404514 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.404507 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.405342 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: E1208 21:19:02.405947 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 08 21:19:02 crc kubenswrapper[4791]: E1208 21:19:02.405978 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 08 21:19:02 crc kubenswrapper[4791]: E1208 21:19:02.405995 4791 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:19:02 crc kubenswrapper[4791]: E1208 21:19:02.406063 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-08 21:19:02.906042876 +0000 UTC m=+19.604801221 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:19:02 crc kubenswrapper[4791]: E1208 21:19:02.407189 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 08 21:19:02 crc kubenswrapper[4791]: E1208 21:19:02.407215 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 08 21:19:02 crc kubenswrapper[4791]: E1208 21:19:02.407232 4791 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:19:02 crc kubenswrapper[4791]: E1208 21:19:02.407293 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-08 21:19:02.907274605 +0000 UTC m=+19.606033030 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.408432 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.410131 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.414844 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.415054 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.415386 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.415573 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.415847 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.416006 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.416054 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.417122 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.417343 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.418871 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.419137 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.419304 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.419388 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.419602 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.419722 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.419814 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.420800 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.420824 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.421097 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.421533 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.421606 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.421681 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.421779 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.421797 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.421837 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.421937 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.422367 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). 
InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.424535 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.424953 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.428507 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.429883 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.444015 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.446756 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.458229 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.467054 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.468649 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.468766 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j87l5\" (UniqueName: \"kubernetes.io/projected/0a943687-2f86-4422-854f-ab38b351b8c1-kube-api-access-j87l5\") pod \"node-resolver-np52c\" (UID: \"0a943687-2f86-4422-854f-ab38b351b8c1\") " pod="openshift-dns/node-resolver-np52c" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.468937 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.469043 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.469234 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/0a943687-2f86-4422-854f-ab38b351b8c1-hosts-file\") pod \"node-resolver-np52c\" (UID: \"0a943687-2f86-4422-854f-ab38b351b8c1\") " pod="openshift-dns/node-resolver-np52c" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.469016 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.469311 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.469480 4791 
reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.469499 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.469512 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.469527 4791 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.469540 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.469370 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.469552 4791 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.469567 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.469579 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.469593 4791 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.469604 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.469618 4791 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.469632 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: 
\"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.469643 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.469656 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.469680 4791 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.469693 4791 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.469721 4791 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.469734 4791 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.469746 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.469757 4791 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.469769 4791 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.469781 4791 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.469793 4791 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.469804 4791 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.469816 4791 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: 
\"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.469828 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.469840 4791 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.469853 4791 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.469865 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.469879 4791 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.469894 4791 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.469908 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.469923 4791 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.469935 4791 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.469947 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.469960 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.469972 4791 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.469982 4791 reconciler_common.go:293] "Volume detached 
for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.469993 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.470015 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.470027 4791 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.470039 4791 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.470050 4791 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.470061 4791 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.470072 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.470084 4791 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.470095 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.470106 4791 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.470118 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.470129 4791 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.470139 4791 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: 
\"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.470152 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.470164 4791 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.470176 4791 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.470199 4791 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.470212 4791 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.470223 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.470235 4791 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.470248 4791 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.470259 4791 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.470269 4791 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.470279 4791 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.470291 4791 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.470303 4791 reconciler_common.go:293] "Volume detached for volume 
\"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.470315 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.470326 4791 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.470338 4791 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.470348 4791 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.470358 4791 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.470366 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.470376 4791 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.470385 4791 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.470394 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.470403 4791 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.470414 4791 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.470426 4791 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.470439 4791 reconciler_common.go:293] "Volume detached for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.470451 4791 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.470463 4791 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.470506 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.477377 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.487896 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:02 crc kubenswrapper[4791]: E1208 21:19:02.487917 4791 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:iptables-alerter,Image:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2,Command:[/iptables-alerter/iptables-alerter.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONTAINER_RUNTIME_ENDPOINT,Value:unix:///run/crio/crio.sock,ValueFrom:nil,},EnvVar{Name:ALERTER_POD_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{68157440 0} {} 65Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:iptables-alerter-script,ReadOnly:false,MountPath:/iptables-alerter,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:host-slash,ReadOnly:true,MountPath:/host,SubPath:,MountPropagation:*HostToContainer,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rczfb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:*true,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod iptables-alerter-4ln5h_openshift-network-operator(d75a4c96-2883-4a0b-bab2-0fab2b6c0b49): CreateContainerConfigError: services have not yet been read at least once, cannot construct envvars" logger="UnhandledError" Dec 08 21:19:02 crc kubenswrapper[4791]: E1208 21:19:02.489484 4791 kuberuntime_manager.go:1274] "Unhandled Error" err=< Dec 08 21:19:02 crc kubenswrapper[4791]: container 
&Container{Name:webhook,Image:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2,Command:[/bin/bash -c set -xe Dec 08 21:19:02 crc kubenswrapper[4791]: if [[ -f "/env/_master" ]]; then Dec 08 21:19:02 crc kubenswrapper[4791]: set -o allexport Dec 08 21:19:02 crc kubenswrapper[4791]: source "/env/_master" Dec 08 21:19:02 crc kubenswrapper[4791]: set +o allexport Dec 08 21:19:02 crc kubenswrapper[4791]: fi Dec 08 21:19:02 crc kubenswrapper[4791]: # OVN-K will try to remove hybrid overlay node annotations even when the hybrid overlay is not enabled. Dec 08 21:19:02 crc kubenswrapper[4791]: # https://github.com/ovn-org/ovn-kubernetes/blob/ac6820df0b338a246f10f412cd5ec903bd234694/go-controller/pkg/ovn/master.go#L791 Dec 08 21:19:02 crc kubenswrapper[4791]: ho_enable="--enable-hybrid-overlay" Dec 08 21:19:02 crc kubenswrapper[4791]: echo "I$(date "+%m%d %H:%M:%S.%N") - network-node-identity - start webhook" Dec 08 21:19:02 crc kubenswrapper[4791]: # extra-allowed-user: service account `ovn-kubernetes-control-plane` Dec 08 21:19:02 crc kubenswrapper[4791]: # sets pod annotations in multi-homing layer3 network controller (cluster-manager) Dec 08 21:19:02 crc kubenswrapper[4791]: exec /usr/bin/ovnkube-identity --k8s-apiserver=https://api-int.crc.testing:6443 \ Dec 08 21:19:02 crc kubenswrapper[4791]: --webhook-cert-dir="/etc/webhook-cert" \ Dec 08 21:19:02 crc kubenswrapper[4791]: --webhook-host=127.0.0.1 \ Dec 08 21:19:02 crc kubenswrapper[4791]: --webhook-port=9743 \ Dec 08 21:19:02 crc kubenswrapper[4791]: ${ho_enable} \ Dec 08 21:19:02 crc kubenswrapper[4791]: --enable-interconnect \ Dec 08 21:19:02 crc kubenswrapper[4791]: --disable-approver \ Dec 08 21:19:02 crc kubenswrapper[4791]: --extra-allowed-user="system:serviceaccount:openshift-ovn-kubernetes:ovn-kubernetes-control-plane" \ Dec 08 21:19:02 crc kubenswrapper[4791]: --wait-for-kubernetes-api=200s \ Dec 08 21:19:02 crc kubenswrapper[4791]: --pod-admission-conditions="/var/run/ovnkube-identity-config/additional-pod-admission-cond.json" \ Dec 08 21:19:02 crc kubenswrapper[4791]: --loglevel="${LOGLEVEL}" Dec 08 21:19:02 crc kubenswrapper[4791]: ],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LOGLEVEL,Value:2,ValueFrom:nil,},EnvVar{Name:KUBERNETES_NODE_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:spec.nodeName,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:webhook-cert,ReadOnly:false,MountPath:/etc/webhook-cert/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:env-overrides,ReadOnly:false,MountPath:/env,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovnkube-identity-cm,ReadOnly:false,MountPath:/var/run/ovnkube-identity-config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-s2kz5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000470000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod network-node-identity-vrzqb_openshift-network-node-identity(ef543e1b-8068-4ea3-b32a-61027b32e95d): CreateContainerConfigError: services have not yet been read at least once, cannot construct envvars Dec 08 21:19:02 crc kubenswrapper[4791]: > logger="UnhandledError" Dec 08 21:19:02 crc kubenswrapper[4791]: E1208 21:19:02.489530 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"iptables-alerter\" with CreateContainerConfigError: \"services have not yet been read at least once, cannot construct envvars\"" pod="openshift-network-operator/iptables-alerter-4ln5h" podUID="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" Dec 08 21:19:02 crc kubenswrapper[4791]: E1208 21:19:02.491451 4791 kuberuntime_manager.go:1274] "Unhandled Error" err=< Dec 08 21:19:02 crc kubenswrapper[4791]: container &Container{Name:approver,Image:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2,Command:[/bin/bash -c set -xe Dec 08 21:19:02 crc kubenswrapper[4791]: if [[ -f "/env/_master" ]]; then Dec 08 21:19:02 crc kubenswrapper[4791]: set -o allexport Dec 08 21:19:02 crc kubenswrapper[4791]: source "/env/_master" Dec 08 21:19:02 crc kubenswrapper[4791]: set +o allexport Dec 08 21:19:02 crc kubenswrapper[4791]: fi Dec 08 21:19:02 crc kubenswrapper[4791]: Dec 08 21:19:02 crc kubenswrapper[4791]: echo "I$(date "+%m%d %H:%M:%S.%N") - network-node-identity - start approver" Dec 08 21:19:02 crc kubenswrapper[4791]: exec /usr/bin/ovnkube-identity --k8s-apiserver=https://api-int.crc.testing:6443 \ Dec 08 21:19:02 crc kubenswrapper[4791]: --disable-webhook \ Dec 08 21:19:02 crc kubenswrapper[4791]: --csr-acceptance-conditions="/var/run/ovnkube-identity-config/additional-cert-acceptance-cond.json" \ Dec 08 21:19:02 crc kubenswrapper[4791]: --loglevel="${LOGLEVEL}" Dec 08 21:19:02 crc kubenswrapper[4791]: ],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LOGLEVEL,Value:4,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:env-overrides,ReadOnly:false,MountPath:/env,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovnkube-identity-cm,ReadOnly:false,MountPath:/var/run/ovnkube-identity-config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-s2kz5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000470000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod network-node-identity-vrzqb_openshift-network-node-identity(ef543e1b-8068-4ea3-b32a-61027b32e95d): CreateContainerConfigError: services have not yet been read at least once, cannot construct envvars Dec 08 21:19:02 crc kubenswrapper[4791]: > logger="UnhandledError" Dec 08 21:19:02 crc kubenswrapper[4791]: E1208 21:19:02.492798 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"webhook\" with CreateContainerConfigError: \"services have not yet been read at least once, cannot construct envvars\", failed to \"StartContainer\" for \"approver\" with CreateContainerConfigError: \"services have not yet been read at least once, cannot construct envvars\"]" pod="openshift-network-node-identity/network-node-identity-vrzqb" podUID="ef543e1b-8068-4ea3-b32a-61027b32e95d" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.496675 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-np52c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a943687-2f86-4422-854f-ab38b351b8c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j87l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-np52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.509610 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d7d5b0-bf26-4221-9933-1f2af688750f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4787a6a226550bb0d96b1b8af8059a6d349d659bad8718ef1df246a66b1e0ac2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b454a2e59b4533af556e4f5175b49175c7b4656861477cec02573e09f27b28\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20ffa577a6292eb443cd5673c9d3a3833627cd38e63b153db5627047dc57b8f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad31c2445a3c19bedfc0d319262fdcc11fe2ba317881450f100cd4e67e2355f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resourc
e-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153324bc4f35dd06b193de5ef89462ab658a50a1384b9eb6f5b3b5ccf0b2018\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.520788 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d7d5b0-bf26-4221-9933-1f2af688750f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4787a6a226550bb0d96b1b8af8059a6d349d659bad8718ef1df246a66b1e0ac2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b454a2e59b4533af556e4f5175b49175c7b4656861477cec02573e09f27b28\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20ffa577a6292eb443cd5673c9d3a3833627cd38e63b153db5627047dc57b8f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad31c2445a3c19bedfc0d319262fdcc11fe2ba317881450f100cd4e67e2355f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153324bc4f35dd06b193de5ef89462ab658a50a1384b9eb6f5b3b5ccf0b2018\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.530611 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.540618 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.550332 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.557139 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-np52c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a943687-2f86-4422-854f-ab38b351b8c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j87l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-np52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 
08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.567388 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.571279 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j87l5\" (UniqueName: \"kubernetes.io/projected/0a943687-2f86-4422-854f-ab38b351b8c1-kube-api-access-j87l5\") pod \"node-resolver-np52c\" (UID: \"0a943687-2f86-4422-854f-ab38b351b8c1\") " pod="openshift-dns/node-resolver-np52c" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.571342 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/0a943687-2f86-4422-854f-ab38b351b8c1-hosts-file\") pod \"node-resolver-np52c\" (UID: \"0a943687-2f86-4422-854f-ab38b351b8c1\") " pod="openshift-dns/node-resolver-np52c" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.571431 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/0a943687-2f86-4422-854f-ab38b351b8c1-hosts-file\") pod \"node-resolver-np52c\" (UID: \"0a943687-2f86-4422-854f-ab38b351b8c1\") " pod="openshift-dns/node-resolver-np52c" Dec 
08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.581500 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.590173 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.591523 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j87l5\" (UniqueName: \"kubernetes.io/projected/0a943687-2f86-4422-854f-ab38b351b8c1-kube-api-access-j87l5\") pod \"node-resolver-np52c\" (UID: \"0a943687-2f86-4422-854f-ab38b351b8c1\") " pod="openshift-dns/node-resolver-np52c" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.734132 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-np52c" Dec 08 21:19:02 crc kubenswrapper[4791]: W1208 21:19:02.749174 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0a943687_2f86_4422_854f_ab38b351b8c1.slice/crio-794d1602b7c99159f547272789b5d05ccbb724922f1512385317eebd83275b8c WatchSource:0}: Error finding container 794d1602b7c99159f547272789b5d05ccbb724922f1512385317eebd83275b8c: Status 404 returned error can't find the container with id 794d1602b7c99159f547272789b5d05ccbb724922f1512385317eebd83275b8c Dec 08 21:19:02 crc kubenswrapper[4791]: E1208 21:19:02.750656 4791 kuberuntime_manager.go:1274] "Unhandled Error" err=< Dec 08 21:19:02 crc kubenswrapper[4791]: container &Container{Name:dns-node-resolver,Image:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2,Command:[/bin/bash -c #!/bin/bash Dec 08 21:19:02 crc kubenswrapper[4791]: set -uo pipefail Dec 08 21:19:02 crc kubenswrapper[4791]: Dec 08 21:19:02 crc kubenswrapper[4791]: trap 'jobs -p | xargs kill || true; wait; exit 0' TERM Dec 08 21:19:02 crc kubenswrapper[4791]: Dec 08 21:19:02 crc kubenswrapper[4791]: OPENSHIFT_MARKER="openshift-generated-node-resolver" Dec 08 21:19:02 crc kubenswrapper[4791]: HOSTS_FILE="/etc/hosts" Dec 08 21:19:02 crc kubenswrapper[4791]: TEMP_FILE="/etc/hosts.tmp" Dec 08 21:19:02 crc kubenswrapper[4791]: Dec 08 21:19:02 crc kubenswrapper[4791]: IFS=', ' read -r -a services <<< "${SERVICES}" Dec 08 21:19:02 crc kubenswrapper[4791]: Dec 08 21:19:02 crc kubenswrapper[4791]: # Make a temporary file with the old hosts file's attributes. Dec 08 21:19:02 crc kubenswrapper[4791]: if ! cp -f --attributes-only "${HOSTS_FILE}" "${TEMP_FILE}"; then Dec 08 21:19:02 crc kubenswrapper[4791]: echo "Failed to preserve hosts file. Exiting." 
Dec 08 21:19:02 crc kubenswrapper[4791]: exit 1 Dec 08 21:19:02 crc kubenswrapper[4791]: fi Dec 08 21:19:02 crc kubenswrapper[4791]: Dec 08 21:19:02 crc kubenswrapper[4791]: while true; do Dec 08 21:19:02 crc kubenswrapper[4791]: declare -A svc_ips Dec 08 21:19:02 crc kubenswrapper[4791]: for svc in "${services[@]}"; do Dec 08 21:19:02 crc kubenswrapper[4791]: # Fetch service IP from cluster dns if present. We make several tries Dec 08 21:19:02 crc kubenswrapper[4791]: # to do it: IPv4, IPv6, IPv4 over TCP and IPv6 over TCP. The two last ones Dec 08 21:19:02 crc kubenswrapper[4791]: # are for deployments with Kuryr on older OpenStack (OSP13) - those do not Dec 08 21:19:02 crc kubenswrapper[4791]: # support UDP loadbalancers and require reaching DNS through TCP. Dec 08 21:19:02 crc kubenswrapper[4791]: cmds=('dig -t A @"${NAMESERVER}" +short "${svc}.${CLUSTER_DOMAIN}"|grep -v "^;"' Dec 08 21:19:02 crc kubenswrapper[4791]: 'dig -t AAAA @"${NAMESERVER}" +short "${svc}.${CLUSTER_DOMAIN}"|grep -v "^;"' Dec 08 21:19:02 crc kubenswrapper[4791]: 'dig -t A +tcp +retry=0 @"${NAMESERVER}" +short "${svc}.${CLUSTER_DOMAIN}"|grep -v "^;"' Dec 08 21:19:02 crc kubenswrapper[4791]: 'dig -t AAAA +tcp +retry=0 @"${NAMESERVER}" +short "${svc}.${CLUSTER_DOMAIN}"|grep -v "^;"') Dec 08 21:19:02 crc kubenswrapper[4791]: for i in ${!cmds[*]} Dec 08 21:19:02 crc kubenswrapper[4791]: do Dec 08 21:19:02 crc kubenswrapper[4791]: ips=($(eval "${cmds[i]}")) Dec 08 21:19:02 crc kubenswrapper[4791]: if [[ "$?" -eq 0 && "${#ips[@]}" -ne 0 ]]; then Dec 08 21:19:02 crc kubenswrapper[4791]: svc_ips["${svc}"]="${ips[@]}" Dec 08 21:19:02 crc kubenswrapper[4791]: break Dec 08 21:19:02 crc kubenswrapper[4791]: fi Dec 08 21:19:02 crc kubenswrapper[4791]: done Dec 08 21:19:02 crc kubenswrapper[4791]: done Dec 08 21:19:02 crc kubenswrapper[4791]: Dec 08 21:19:02 crc kubenswrapper[4791]: # Update /etc/hosts only if we get valid service IPs Dec 08 21:19:02 crc kubenswrapper[4791]: # We will not update /etc/hosts when there is coredns service outage or api unavailability Dec 08 21:19:02 crc kubenswrapper[4791]: # Stale entries could exist in /etc/hosts if the service is deleted Dec 08 21:19:02 crc kubenswrapper[4791]: if [[ -n "${svc_ips[*]-}" ]]; then Dec 08 21:19:02 crc kubenswrapper[4791]: # Build a new hosts file from /etc/hosts with our custom entries filtered out Dec 08 21:19:02 crc kubenswrapper[4791]: if ! sed --silent "/# ${OPENSHIFT_MARKER}/d; w ${TEMP_FILE}" "${HOSTS_FILE}"; then Dec 08 21:19:02 crc kubenswrapper[4791]: # Only continue rebuilding the hosts entries if its original content is preserved Dec 08 21:19:02 crc kubenswrapper[4791]: sleep 60 & wait Dec 08 21:19:02 crc kubenswrapper[4791]: continue Dec 08 21:19:02 crc kubenswrapper[4791]: fi Dec 08 21:19:02 crc kubenswrapper[4791]: Dec 08 21:19:02 crc kubenswrapper[4791]: # Append resolver entries for services Dec 08 21:19:02 crc kubenswrapper[4791]: rc=0 Dec 08 21:19:02 crc kubenswrapper[4791]: for svc in "${!svc_ips[@]}"; do Dec 08 21:19:02 crc kubenswrapper[4791]: for ip in ${svc_ips[${svc}]}; do Dec 08 21:19:02 crc kubenswrapper[4791]: echo "${ip} ${svc} ${svc}.${CLUSTER_DOMAIN} # ${OPENSHIFT_MARKER}" >> "${TEMP_FILE}" || rc=$? 
Dec 08 21:19:02 crc kubenswrapper[4791]: done Dec 08 21:19:02 crc kubenswrapper[4791]: done Dec 08 21:19:02 crc kubenswrapper[4791]: if [[ $rc -ne 0 ]]; then Dec 08 21:19:02 crc kubenswrapper[4791]: sleep 60 & wait Dec 08 21:19:02 crc kubenswrapper[4791]: continue Dec 08 21:19:02 crc kubenswrapper[4791]: fi Dec 08 21:19:02 crc kubenswrapper[4791]: Dec 08 21:19:02 crc kubenswrapper[4791]: Dec 08 21:19:02 crc kubenswrapper[4791]: # TODO: Update /etc/hosts atomically to avoid any inconsistent behavior Dec 08 21:19:02 crc kubenswrapper[4791]: # Replace /etc/hosts with our modified version if needed Dec 08 21:19:02 crc kubenswrapper[4791]: cmp "${TEMP_FILE}" "${HOSTS_FILE}" || cp -f "${TEMP_FILE}" "${HOSTS_FILE}" Dec 08 21:19:02 crc kubenswrapper[4791]: # TEMP_FILE is not removed to avoid file create/delete and attributes copy churn Dec 08 21:19:02 crc kubenswrapper[4791]: fi Dec 08 21:19:02 crc kubenswrapper[4791]: sleep 60 & wait Dec 08 21:19:02 crc kubenswrapper[4791]: unset svc_ips Dec 08 21:19:02 crc kubenswrapper[4791]: done Dec 08 21:19:02 crc kubenswrapper[4791]: ],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:SERVICES,Value:image-registry.openshift-image-registry.svc,ValueFrom:nil,},EnvVar{Name:NAMESERVER,Value:10.217.4.10,ValueFrom:nil,},EnvVar{Name:CLUSTER_DOMAIN,Value:cluster.local,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{22020096 0} {} 21Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:hosts-file,ReadOnly:false,MountPath:/etc/hosts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-j87l5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:*true,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod node-resolver-np52c_openshift-dns(0a943687-2f86-4422-854f-ab38b351b8c1): CreateContainerConfigError: services have not yet been read at least once, cannot construct envvars Dec 08 21:19:02 crc kubenswrapper[4791]: > logger="UnhandledError" Dec 08 21:19:02 crc kubenswrapper[4791]: E1208 21:19:02.751774 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"dns-node-resolver\" with CreateContainerConfigError: \"services have not yet been read at least once, cannot construct envvars\"" pod="openshift-dns/node-resolver-np52c" podUID="0a943687-2f86-4422-854f-ab38b351b8c1" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.751881 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-49gdc"] Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.752335 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.752339 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-bpjxc"] Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.753088 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-kgd9w"] Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.753361 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.753926 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-bpjxc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.755425 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"7ef379989ec4b80ff174a145108a0b47a2850711ac849629d52f274623a94a4f"} Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.756738 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.756837 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.756976 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.757116 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-np52c" event={"ID":"0a943687-2f86-4422-854f-ab38b351b8c1","Type":"ContainerStarted","Data":"794d1602b7c99159f547272789b5d05ccbb724922f1512385317eebd83275b8c"} Dec 08 21:19:02 crc kubenswrapper[4791]: E1208 21:19:02.757201 4791 kuberuntime_manager.go:1274] "Unhandled Error" err=< Dec 08 21:19:02 crc kubenswrapper[4791]: container &Container{Name:webhook,Image:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2,Command:[/bin/bash -c set -xe Dec 08 21:19:02 crc kubenswrapper[4791]: if [[ -f "/env/_master" ]]; then Dec 08 21:19:02 crc kubenswrapper[4791]: set -o allexport Dec 08 21:19:02 crc kubenswrapper[4791]: source "/env/_master" Dec 08 21:19:02 crc kubenswrapper[4791]: set +o allexport Dec 08 21:19:02 crc kubenswrapper[4791]: fi Dec 08 21:19:02 crc kubenswrapper[4791]: # OVN-K will try to remove hybrid overlay node annotations even when the hybrid overlay is not enabled. 
Dec 08 21:19:02 crc kubenswrapper[4791]: # https://github.com/ovn-org/ovn-kubernetes/blob/ac6820df0b338a246f10f412cd5ec903bd234694/go-controller/pkg/ovn/master.go#L791 Dec 08 21:19:02 crc kubenswrapper[4791]: ho_enable="--enable-hybrid-overlay" Dec 08 21:19:02 crc kubenswrapper[4791]: echo "I$(date "+%m%d %H:%M:%S.%N") - network-node-identity - start webhook" Dec 08 21:19:02 crc kubenswrapper[4791]: # extra-allowed-user: service account `ovn-kubernetes-control-plane` Dec 08 21:19:02 crc kubenswrapper[4791]: # sets pod annotations in multi-homing layer3 network controller (cluster-manager) Dec 08 21:19:02 crc kubenswrapper[4791]: exec /usr/bin/ovnkube-identity --k8s-apiserver=https://api-int.crc.testing:6443 \ Dec 08 21:19:02 crc kubenswrapper[4791]: --webhook-cert-dir="/etc/webhook-cert" \ Dec 08 21:19:02 crc kubenswrapper[4791]: --webhook-host=127.0.0.1 \ Dec 08 21:19:02 crc kubenswrapper[4791]: --webhook-port=9743 \ Dec 08 21:19:02 crc kubenswrapper[4791]: ${ho_enable} \ Dec 08 21:19:02 crc kubenswrapper[4791]: --enable-interconnect \ Dec 08 21:19:02 crc kubenswrapper[4791]: --disable-approver \ Dec 08 21:19:02 crc kubenswrapper[4791]: --extra-allowed-user="system:serviceaccount:openshift-ovn-kubernetes:ovn-kubernetes-control-plane" \ Dec 08 21:19:02 crc kubenswrapper[4791]: --wait-for-kubernetes-api=200s \ Dec 08 21:19:02 crc kubenswrapper[4791]: --pod-admission-conditions="/var/run/ovnkube-identity-config/additional-pod-admission-cond.json" \ Dec 08 21:19:02 crc kubenswrapper[4791]: --loglevel="${LOGLEVEL}" Dec 08 21:19:02 crc kubenswrapper[4791]: ],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LOGLEVEL,Value:2,ValueFrom:nil,},EnvVar{Name:KUBERNETES_NODE_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:spec.nodeName,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:webhook-cert,ReadOnly:false,MountPath:/etc/webhook-cert/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:env-overrides,ReadOnly:false,MountPath:/env,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovnkube-identity-cm,ReadOnly:false,MountPath:/var/run/ovnkube-identity-config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-s2kz5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000470000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod network-node-identity-vrzqb_openshift-network-node-identity(ef543e1b-8068-4ea3-b32a-61027b32e95d): CreateContainerConfigError: services have not yet been read at least once, cannot construct 
envvars Dec 08 21:19:02 crc kubenswrapper[4791]: > logger="UnhandledError" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.757252 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.757132 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.757393 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.757406 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.757195 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.757230 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.757264 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.757281 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.757773 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 08 21:19:02 crc kubenswrapper[4791]: E1208 21:19:02.758392 4791 kuberuntime_manager.go:1274] "Unhandled Error" err=< Dec 08 21:19:02 crc kubenswrapper[4791]: container &Container{Name:dns-node-resolver,Image:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2,Command:[/bin/bash -c #!/bin/bash Dec 08 21:19:02 crc kubenswrapper[4791]: set -uo pipefail Dec 08 21:19:02 crc kubenswrapper[4791]: Dec 08 21:19:02 crc kubenswrapper[4791]: trap 'jobs -p | xargs kill || true; wait; exit 0' TERM Dec 08 21:19:02 crc kubenswrapper[4791]: Dec 08 21:19:02 crc kubenswrapper[4791]: OPENSHIFT_MARKER="openshift-generated-node-resolver" Dec 08 21:19:02 crc kubenswrapper[4791]: HOSTS_FILE="/etc/hosts" Dec 08 21:19:02 crc kubenswrapper[4791]: TEMP_FILE="/etc/hosts.tmp" Dec 08 21:19:02 crc kubenswrapper[4791]: Dec 08 21:19:02 crc kubenswrapper[4791]: IFS=', ' read -r -a services <<< "${SERVICES}" Dec 08 21:19:02 crc kubenswrapper[4791]: Dec 08 21:19:02 crc kubenswrapper[4791]: # Make a temporary file with the old hosts file's attributes. Dec 08 21:19:02 crc kubenswrapper[4791]: if ! cp -f --attributes-only "${HOSTS_FILE}" "${TEMP_FILE}"; then Dec 08 21:19:02 crc kubenswrapper[4791]: echo "Failed to preserve hosts file. Exiting." Dec 08 21:19:02 crc kubenswrapper[4791]: exit 1 Dec 08 21:19:02 crc kubenswrapper[4791]: fi Dec 08 21:19:02 crc kubenswrapper[4791]: Dec 08 21:19:02 crc kubenswrapper[4791]: while true; do Dec 08 21:19:02 crc kubenswrapper[4791]: declare -A svc_ips Dec 08 21:19:02 crc kubenswrapper[4791]: for svc in "${services[@]}"; do Dec 08 21:19:02 crc kubenswrapper[4791]: # Fetch service IP from cluster dns if present. 
We make several tries Dec 08 21:19:02 crc kubenswrapper[4791]: # to do it: IPv4, IPv6, IPv4 over TCP and IPv6 over TCP. The two last ones Dec 08 21:19:02 crc kubenswrapper[4791]: # are for deployments with Kuryr on older OpenStack (OSP13) - those do not Dec 08 21:19:02 crc kubenswrapper[4791]: # support UDP loadbalancers and require reaching DNS through TCP. Dec 08 21:19:02 crc kubenswrapper[4791]: cmds=('dig -t A @"${NAMESERVER}" +short "${svc}.${CLUSTER_DOMAIN}"|grep -v "^;"' Dec 08 21:19:02 crc kubenswrapper[4791]: 'dig -t AAAA @"${NAMESERVER}" +short "${svc}.${CLUSTER_DOMAIN}"|grep -v "^;"' Dec 08 21:19:02 crc kubenswrapper[4791]: 'dig -t A +tcp +retry=0 @"${NAMESERVER}" +short "${svc}.${CLUSTER_DOMAIN}"|grep -v "^;"' Dec 08 21:19:02 crc kubenswrapper[4791]: 'dig -t AAAA +tcp +retry=0 @"${NAMESERVER}" +short "${svc}.${CLUSTER_DOMAIN}"|grep -v "^;"') Dec 08 21:19:02 crc kubenswrapper[4791]: for i in ${!cmds[*]} Dec 08 21:19:02 crc kubenswrapper[4791]: do Dec 08 21:19:02 crc kubenswrapper[4791]: ips=($(eval "${cmds[i]}")) Dec 08 21:19:02 crc kubenswrapper[4791]: if [[ "$?" -eq 0 && "${#ips[@]}" -ne 0 ]]; then Dec 08 21:19:02 crc kubenswrapper[4791]: svc_ips["${svc}"]="${ips[@]}" Dec 08 21:19:02 crc kubenswrapper[4791]: break Dec 08 21:19:02 crc kubenswrapper[4791]: fi Dec 08 21:19:02 crc kubenswrapper[4791]: done Dec 08 21:19:02 crc kubenswrapper[4791]: done Dec 08 21:19:02 crc kubenswrapper[4791]: Dec 08 21:19:02 crc kubenswrapper[4791]: # Update /etc/hosts only if we get valid service IPs Dec 08 21:19:02 crc kubenswrapper[4791]: # We will not update /etc/hosts when there is coredns service outage or api unavailability Dec 08 21:19:02 crc kubenswrapper[4791]: # Stale entries could exist in /etc/hosts if the service is deleted Dec 08 21:19:02 crc kubenswrapper[4791]: if [[ -n "${svc_ips[*]-}" ]]; then Dec 08 21:19:02 crc kubenswrapper[4791]: # Build a new hosts file from /etc/hosts with our custom entries filtered out Dec 08 21:19:02 crc kubenswrapper[4791]: if ! sed --silent "/# ${OPENSHIFT_MARKER}/d; w ${TEMP_FILE}" "${HOSTS_FILE}"; then Dec 08 21:19:02 crc kubenswrapper[4791]: # Only continue rebuilding the hosts entries if its original content is preserved Dec 08 21:19:02 crc kubenswrapper[4791]: sleep 60 & wait Dec 08 21:19:02 crc kubenswrapper[4791]: continue Dec 08 21:19:02 crc kubenswrapper[4791]: fi Dec 08 21:19:02 crc kubenswrapper[4791]: Dec 08 21:19:02 crc kubenswrapper[4791]: # Append resolver entries for services Dec 08 21:19:02 crc kubenswrapper[4791]: rc=0 Dec 08 21:19:02 crc kubenswrapper[4791]: for svc in "${!svc_ips[@]}"; do Dec 08 21:19:02 crc kubenswrapper[4791]: for ip in ${svc_ips[${svc}]}; do Dec 08 21:19:02 crc kubenswrapper[4791]: echo "${ip} ${svc} ${svc}.${CLUSTER_DOMAIN} # ${OPENSHIFT_MARKER}" >> "${TEMP_FILE}" || rc=$? 
Dec 08 21:19:02 crc kubenswrapper[4791]: done Dec 08 21:19:02 crc kubenswrapper[4791]: done Dec 08 21:19:02 crc kubenswrapper[4791]: if [[ $rc -ne 0 ]]; then Dec 08 21:19:02 crc kubenswrapper[4791]: sleep 60 & wait Dec 08 21:19:02 crc kubenswrapper[4791]: continue Dec 08 21:19:02 crc kubenswrapper[4791]: fi Dec 08 21:19:02 crc kubenswrapper[4791]: Dec 08 21:19:02 crc kubenswrapper[4791]: Dec 08 21:19:02 crc kubenswrapper[4791]: # TODO: Update /etc/hosts atomically to avoid any inconsistent behavior Dec 08 21:19:02 crc kubenswrapper[4791]: # Replace /etc/hosts with our modified version if needed Dec 08 21:19:02 crc kubenswrapper[4791]: cmp "${TEMP_FILE}" "${HOSTS_FILE}" || cp -f "${TEMP_FILE}" "${HOSTS_FILE}" Dec 08 21:19:02 crc kubenswrapper[4791]: # TEMP_FILE is not removed to avoid file create/delete and attributes copy churn Dec 08 21:19:02 crc kubenswrapper[4791]: fi Dec 08 21:19:02 crc kubenswrapper[4791]: sleep 60 & wait Dec 08 21:19:02 crc kubenswrapper[4791]: unset svc_ips Dec 08 21:19:02 crc kubenswrapper[4791]: done Dec 08 21:19:02 crc kubenswrapper[4791]: ],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:SERVICES,Value:image-registry.openshift-image-registry.svc,ValueFrom:nil,},EnvVar{Name:NAMESERVER,Value:10.217.4.10,ValueFrom:nil,},EnvVar{Name:CLUSTER_DOMAIN,Value:cluster.local,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{22020096 0} {} 21Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:hosts-file,ReadOnly:false,MountPath:/etc/hosts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-j87l5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:*true,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod node-resolver-np52c_openshift-dns(0a943687-2f86-4422-854f-ab38b351b8c1): CreateContainerConfigError: services have not yet been read at least once, cannot construct envvars Dec 08 21:19:02 crc kubenswrapper[4791]: > logger="UnhandledError" Dec 08 21:19:02 crc kubenswrapper[4791]: E1208 21:19:02.760084 4791 kuberuntime_manager.go:1274] "Unhandled Error" err=< Dec 08 21:19:02 crc kubenswrapper[4791]: container &Container{Name:approver,Image:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2,Command:[/bin/bash -c set -xe Dec 08 21:19:02 crc kubenswrapper[4791]: if [[ -f "/env/_master" ]]; then Dec 08 21:19:02 crc kubenswrapper[4791]: set -o allexport Dec 08 21:19:02 crc kubenswrapper[4791]: source "/env/_master" Dec 08 21:19:02 crc kubenswrapper[4791]: set +o allexport Dec 08 21:19:02 crc kubenswrapper[4791]: fi Dec 08 21:19:02 crc kubenswrapper[4791]: Dec 08 21:19:02 crc kubenswrapper[4791]: echo "I$(date "+%m%d %H:%M:%S.%N") - network-node-identity - start approver" Dec 08 21:19:02 crc 
kubenswrapper[4791]: exec /usr/bin/ovnkube-identity --k8s-apiserver=https://api-int.crc.testing:6443 \ Dec 08 21:19:02 crc kubenswrapper[4791]: --disable-webhook \ Dec 08 21:19:02 crc kubenswrapper[4791]: --csr-acceptance-conditions="/var/run/ovnkube-identity-config/additional-cert-acceptance-cond.json" \ Dec 08 21:19:02 crc kubenswrapper[4791]: --loglevel="${LOGLEVEL}" Dec 08 21:19:02 crc kubenswrapper[4791]: ],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LOGLEVEL,Value:4,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:env-overrides,ReadOnly:false,MountPath:/env,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovnkube-identity-cm,ReadOnly:false,MountPath:/var/run/ovnkube-identity-config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-s2kz5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000470000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod network-node-identity-vrzqb_openshift-network-node-identity(ef543e1b-8068-4ea3-b32a-61027b32e95d): CreateContainerConfigError: services have not yet been read at least once, cannot construct envvars Dec 08 21:19:02 crc kubenswrapper[4791]: > logger="UnhandledError" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.760196 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 08 21:19:02 crc kubenswrapper[4791]: E1208 21:19:02.760204 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"dns-node-resolver\" with CreateContainerConfigError: \"services have not yet been read at least once, cannot construct envvars\"" pod="openshift-dns/node-resolver-np52c" podUID="0a943687-2f86-4422-854f-ab38b351b8c1" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.760251 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"7346f61f3d14c090d797b4295d95fd0aca0cdc478599966976a47d164c3a4b04"} Dec 08 21:19:02 crc kubenswrapper[4791]: E1208 21:19:02.761445 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"webhook\" with CreateContainerConfigError: \"services have not yet been read at least once, cannot construct envvars\", failed to \"StartContainer\" for \"approver\" with CreateContainerConfigError: \"services have not yet been read at least once, cannot construct envvars\"]" pod="openshift-network-node-identity/network-node-identity-vrzqb" podUID="ef543e1b-8068-4ea3-b32a-61027b32e95d" Dec 
08 21:19:02 crc kubenswrapper[4791]: E1208 21:19:02.762007 4791 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:iptables-alerter,Image:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2,Command:[/iptables-alerter/iptables-alerter.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONTAINER_RUNTIME_ENDPOINT,Value:unix:///run/crio/crio.sock,ValueFrom:nil,},EnvVar{Name:ALERTER_POD_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{68157440 0} {} 65Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:iptables-alerter-script,ReadOnly:false,MountPath:/iptables-alerter,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:host-slash,ReadOnly:true,MountPath:/host,SubPath:,MountPropagation:*HostToContainer,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rczfb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:*true,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod iptables-alerter-4ln5h_openshift-network-operator(d75a4c96-2883-4a0b-bab2-0fab2b6c0b49): CreateContainerConfigError: services have not yet been read at least once, cannot construct envvars" logger="UnhandledError" Dec 08 21:19:02 crc kubenswrapper[4791]: E1208 21:19:02.763926 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"iptables-alerter\" with CreateContainerConfigError: \"services have not yet been read at least once, cannot construct envvars\"" pod="openshift-network-operator/iptables-alerter-4ln5h" podUID="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.771319 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d7d5b0-bf26-4221-9933-1f2af688750f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4787a6a226550bb0d96b1b8af8059a6d349d659bad8718ef1df246a66b1e0ac2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b454a2e59b4533af556e4f5175b49175c7b4656861477cec02573e09f27b28\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20ffa577a6292eb443cd5673c9d3a3833627cd38e63b153db5627047dc57b8f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad31c2445a3c19bedfc0d319262fdcc11fe2ba317881450f100cd4e67e2355f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153324bc4f35dd06b193de5ef89462ab658a50a1384b9eb6f5b3b5ccf0b2018\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.772703 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/dd95c042-30cb-438f-8e98-9aebe3ea93bc-system-cni-dir\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.772811 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/dd95c042-30cb-438f-8e98-9aebe3ea93bc-host-var-lib-cni-bin\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 
21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.772858 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/dd95c042-30cb-438f-8e98-9aebe3ea93bc-multus-daemon-config\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.772880 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-94fd8\" (UniqueName: \"kubernetes.io/projected/dd95c042-30cb-438f-8e98-9aebe3ea93bc-kube-api-access-94fd8\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.772905 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/dd95c042-30cb-438f-8e98-9aebe3ea93bc-cni-binary-copy\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.772926 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/dd95c042-30cb-438f-8e98-9aebe3ea93bc-multus-conf-dir\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.772945 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/dd95c042-30cb-438f-8e98-9aebe3ea93bc-hostroot\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.772970 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sx4hj\" (UniqueName: \"kubernetes.io/projected/6cdfecf8-95cf-4c2b-b98f-eb7bb055771d-kube-api-access-sx4hj\") pod \"machine-config-daemon-kgd9w\" (UID: \"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d\") " pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.773156 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/73955741-20a9-4a15-808b-c72dafba6dce-cnibin\") pod \"multus-additional-cni-plugins-bpjxc\" (UID: \"73955741-20a9-4a15-808b-c72dafba6dce\") " pod="openshift-multus/multus-additional-cni-plugins-bpjxc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.773205 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/dd95c042-30cb-438f-8e98-9aebe3ea93bc-os-release\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.773233 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/dd95c042-30cb-438f-8e98-9aebe3ea93bc-cnibin\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc 
kubenswrapper[4791]: I1208 21:19:02.773256 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/dd95c042-30cb-438f-8e98-9aebe3ea93bc-host-run-k8s-cni-cncf-io\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.773293 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/6cdfecf8-95cf-4c2b-b98f-eb7bb055771d-rootfs\") pod \"machine-config-daemon-kgd9w\" (UID: \"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d\") " pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.773324 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/dd95c042-30cb-438f-8e98-9aebe3ea93bc-etc-kubernetes\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.773357 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/dd95c042-30cb-438f-8e98-9aebe3ea93bc-multus-socket-dir-parent\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.773397 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/dd95c042-30cb-438f-8e98-9aebe3ea93bc-multus-cni-dir\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.773419 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/dd95c042-30cb-438f-8e98-9aebe3ea93bc-host-run-netns\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.773443 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/dd95c042-30cb-438f-8e98-9aebe3ea93bc-host-var-lib-kubelet\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.773464 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/73955741-20a9-4a15-808b-c72dafba6dce-cni-binary-copy\") pod \"multus-additional-cni-plugins-bpjxc\" (UID: \"73955741-20a9-4a15-808b-c72dafba6dce\") " pod="openshift-multus/multus-additional-cni-plugins-bpjxc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.773497 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/dd95c042-30cb-438f-8e98-9aebe3ea93bc-host-run-multus-certs\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " 
pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.773518 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/73955741-20a9-4a15-808b-c72dafba6dce-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-bpjxc\" (UID: \"73955741-20a9-4a15-808b-c72dafba6dce\") " pod="openshift-multus/multus-additional-cni-plugins-bpjxc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.773568 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/73955741-20a9-4a15-808b-c72dafba6dce-system-cni-dir\") pod \"multus-additional-cni-plugins-bpjxc\" (UID: \"73955741-20a9-4a15-808b-c72dafba6dce\") " pod="openshift-multus/multus-additional-cni-plugins-bpjxc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.773589 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/dd95c042-30cb-438f-8e98-9aebe3ea93bc-host-var-lib-cni-multus\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.773614 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/73955741-20a9-4a15-808b-c72dafba6dce-os-release\") pod \"multus-additional-cni-plugins-bpjxc\" (UID: \"73955741-20a9-4a15-808b-c72dafba6dce\") " pod="openshift-multus/multus-additional-cni-plugins-bpjxc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.773647 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/73955741-20a9-4a15-808b-c72dafba6dce-tuning-conf-dir\") pod \"multus-additional-cni-plugins-bpjxc\" (UID: \"73955741-20a9-4a15-808b-c72dafba6dce\") " pod="openshift-multus/multus-additional-cni-plugins-bpjxc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.773671 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6cdfecf8-95cf-4c2b-b98f-eb7bb055771d-mcd-auth-proxy-config\") pod \"machine-config-daemon-kgd9w\" (UID: \"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d\") " pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.773699 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lp7z7\" (UniqueName: \"kubernetes.io/projected/73955741-20a9-4a15-808b-c72dafba6dce-kube-api-access-lp7z7\") pod \"multus-additional-cni-plugins-bpjxc\" (UID: \"73955741-20a9-4a15-808b-c72dafba6dce\") " pod="openshift-multus/multus-additional-cni-plugins-bpjxc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.773742 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6cdfecf8-95cf-4c2b-b98f-eb7bb055771d-proxy-tls\") pod \"machine-config-daemon-kgd9w\" (UID: \"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d\") " pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" Dec 08 21:19:02 crc kubenswrapper[4791]: E1208 21:19:02.774640 4791 
kuberuntime_manager.go:1274] "Unhandled Error" err=< Dec 08 21:19:02 crc kubenswrapper[4791]: container &Container{Name:network-operator,Image:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b,Command:[/bin/bash -c #!/bin/bash Dec 08 21:19:02 crc kubenswrapper[4791]: set -o allexport Dec 08 21:19:02 crc kubenswrapper[4791]: if [[ -f /etc/kubernetes/apiserver-url.env ]]; then Dec 08 21:19:02 crc kubenswrapper[4791]: source /etc/kubernetes/apiserver-url.env Dec 08 21:19:02 crc kubenswrapper[4791]: else Dec 08 21:19:02 crc kubenswrapper[4791]: echo "Error: /etc/kubernetes/apiserver-url.env is missing" Dec 08 21:19:02 crc kubenswrapper[4791]: exit 1 Dec 08 21:19:02 crc kubenswrapper[4791]: fi Dec 08 21:19:02 crc kubenswrapper[4791]: exec /usr/bin/cluster-network-operator start --listen=0.0.0.0:9104 Dec 08 21:19:02 crc kubenswrapper[4791]: ],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:cno,HostPort:9104,ContainerPort:9104,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:RELEASE_VERSION,Value:4.18.1,ValueFrom:nil,},EnvVar{Name:KUBE_PROXY_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b97554198294bf544fbc116c94a0a1fb2ec8a4de0e926bf9d9e320135f0bee6f,ValueFrom:nil,},EnvVar{Name:KUBE_RBAC_PROXY_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09,ValueFrom:nil,},EnvVar{Name:MULTUS_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26,ValueFrom:nil,},EnvVar{Name:MULTUS_ADMISSION_CONTROLLER_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317,ValueFrom:nil,},EnvVar{Name:CNI_PLUGINS_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc,ValueFrom:nil,},EnvVar{Name:BOND_CNI_PLUGIN_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78,ValueFrom:nil,},EnvVar{Name:WHEREABOUTS_CNI_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4,ValueFrom:nil,},EnvVar{Name:ROUTE_OVERRRIDE_CNI_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa,ValueFrom:nil,},EnvVar{Name:MULTUS_NETWORKPOLICY_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:23f833d3738d68706eb2f2868bd76bd71cee016cffa6faf5f045a60cc8c6eddd,ValueFrom:nil,},EnvVar{Name:OVN_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2,ValueFrom:nil,},EnvVar{Name:OVN_NB_RAFT_ELECTION_TIMER,Value:10,ValueFrom:nil,},EnvVar{Name:OVN_SB_RAFT_ELECTION_TIMER,Value:16,ValueFrom:nil,},EnvVar{Name:OVN_NORTHD_PROBE_INTERVAL,Value:10000,ValueFrom:nil,},EnvVar{Name:OVN_CONTROLLER_INACTIVITY_PROBE,Value:180000,ValueFrom:nil,},EnvVar{Name:OVN_NB_INACTIVITY_PROBE,Value:60000,ValueFrom:nil,},EnvVar{Name:EGRESS_ROUTER_CNI_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c,ValueFrom:nil,},EnvVar{Name:NETWORK_METRICS_DAEMON_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d,ValueFrom:nil,},EnvVar{Name:N
ETWORK_CHECK_SOURCE_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b,ValueFrom:nil,},EnvVar{Name:NETWORK_CHECK_TARGET_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b,ValueFrom:nil,},EnvVar{Name:NETWORK_OPERATOR_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b,ValueFrom:nil,},EnvVar{Name:CLOUD_NETWORK_CONFIG_CONTROLLER_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8048f1cb0be521f09749c0a489503cd56d85b68c6ca93380e082cfd693cd97a8,ValueFrom:nil,},EnvVar{Name:CLI_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2,ValueFrom:nil,},EnvVar{Name:FRR_K8S_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5dbf844e49bb46b78586930149e5e5f5dc121014c8afd10fe36f3651967cc256,ValueFrom:nil,},EnvVar{Name:NETWORKING_CONSOLE_PLUGIN_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd,ValueFrom:nil,},EnvVar{Name:POD_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:host-etc-kube,ReadOnly:true,MountPath:/etc/kubernetes,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:metrics-tls,ReadOnly:false,MountPath:/var/run/secrets/serving-cert,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rdwmf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:nil,Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod network-operator-58b4c7f79c-55gtf_openshift-network-operator(37a5e44f-9a88-4405-be8a-b645485e7312): CreateContainerConfigError: services have not yet been read at least once, cannot construct envvars Dec 08 21:19:02 crc kubenswrapper[4791]: > logger="UnhandledError" Dec 08 21:19:02 crc kubenswrapper[4791]: E1208 21:19:02.775981 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"network-operator\" with CreateContainerConfigError: \"services have not yet been read at least once, cannot construct envvars\"" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" podUID="37a5e44f-9a88-4405-be8a-b645485e7312" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.786821 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.804903 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.821238 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.844210 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-np52c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a943687-2f86-4422-854f-ab38b351b8c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j87l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-np52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 
08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.856543 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-49gdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd95c042-30cb-438f-8e98-9aebe3ea93bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-94fd8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"st
artTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-49gdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.872872 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.874089 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.874186 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/dd95c042-30cb-438f-8e98-9aebe3ea93bc-host-var-lib-kubelet\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.874221 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/73955741-20a9-4a15-808b-c72dafba6dce-cni-binary-copy\") pod \"multus-additional-cni-plugins-bpjxc\" (UID: \"73955741-20a9-4a15-808b-c72dafba6dce\") " pod="openshift-multus/multus-additional-cni-plugins-bpjxc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.874282 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/dd95c042-30cb-438f-8e98-9aebe3ea93bc-host-var-lib-kubelet\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: E1208 21:19:02.874301 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:03.874271153 +0000 UTC m=+20.573029508 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.874359 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/dd95c042-30cb-438f-8e98-9aebe3ea93bc-host-run-multus-certs\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.874394 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/73955741-20a9-4a15-808b-c72dafba6dce-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-bpjxc\" (UID: \"73955741-20a9-4a15-808b-c72dafba6dce\") " pod="openshift-multus/multus-additional-cni-plugins-bpjxc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.874429 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/73955741-20a9-4a15-808b-c72dafba6dce-system-cni-dir\") pod \"multus-additional-cni-plugins-bpjxc\" (UID: \"73955741-20a9-4a15-808b-c72dafba6dce\") " pod="openshift-multus/multus-additional-cni-plugins-bpjxc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.874438 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/dd95c042-30cb-438f-8e98-9aebe3ea93bc-host-run-multus-certs\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.874469 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/dd95c042-30cb-438f-8e98-9aebe3ea93bc-host-var-lib-cni-multus\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.874492 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/73955741-20a9-4a15-808b-c72dafba6dce-os-release\") pod \"multus-additional-cni-plugins-bpjxc\" (UID: \"73955741-20a9-4a15-808b-c72dafba6dce\") " pod="openshift-multus/multus-additional-cni-plugins-bpjxc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.874502 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/73955741-20a9-4a15-808b-c72dafba6dce-system-cni-dir\") pod \"multus-additional-cni-plugins-bpjxc\" (UID: \"73955741-20a9-4a15-808b-c72dafba6dce\") " pod="openshift-multus/multus-additional-cni-plugins-bpjxc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.874512 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/73955741-20a9-4a15-808b-c72dafba6dce-tuning-conf-dir\") pod \"multus-additional-cni-plugins-bpjxc\" (UID: 
\"73955741-20a9-4a15-808b-c72dafba6dce\") " pod="openshift-multus/multus-additional-cni-plugins-bpjxc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.874534 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6cdfecf8-95cf-4c2b-b98f-eb7bb055771d-mcd-auth-proxy-config\") pod \"machine-config-daemon-kgd9w\" (UID: \"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d\") " pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.874538 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/dd95c042-30cb-438f-8e98-9aebe3ea93bc-host-var-lib-cni-multus\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.874561 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lp7z7\" (UniqueName: \"kubernetes.io/projected/73955741-20a9-4a15-808b-c72dafba6dce-kube-api-access-lp7z7\") pod \"multus-additional-cni-plugins-bpjxc\" (UID: \"73955741-20a9-4a15-808b-c72dafba6dce\") " pod="openshift-multus/multus-additional-cni-plugins-bpjxc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.874581 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6cdfecf8-95cf-4c2b-b98f-eb7bb055771d-proxy-tls\") pod \"machine-config-daemon-kgd9w\" (UID: \"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d\") " pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.874618 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/dd95c042-30cb-438f-8e98-9aebe3ea93bc-system-cni-dir\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.874638 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/dd95c042-30cb-438f-8e98-9aebe3ea93bc-host-var-lib-cni-bin\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.874659 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/dd95c042-30cb-438f-8e98-9aebe3ea93bc-multus-daemon-config\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.874681 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-94fd8\" (UniqueName: \"kubernetes.io/projected/dd95c042-30cb-438f-8e98-9aebe3ea93bc-kube-api-access-94fd8\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.874723 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/dd95c042-30cb-438f-8e98-9aebe3ea93bc-cni-binary-copy\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " 
pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.874747 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/dd95c042-30cb-438f-8e98-9aebe3ea93bc-multus-conf-dir\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.874769 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/dd95c042-30cb-438f-8e98-9aebe3ea93bc-hostroot\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.874790 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sx4hj\" (UniqueName: \"kubernetes.io/projected/6cdfecf8-95cf-4c2b-b98f-eb7bb055771d-kube-api-access-sx4hj\") pod \"machine-config-daemon-kgd9w\" (UID: \"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d\") " pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.874815 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/dd95c042-30cb-438f-8e98-9aebe3ea93bc-os-release\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.874840 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/73955741-20a9-4a15-808b-c72dafba6dce-cnibin\") pod \"multus-additional-cni-plugins-bpjxc\" (UID: \"73955741-20a9-4a15-808b-c72dafba6dce\") " pod="openshift-multus/multus-additional-cni-plugins-bpjxc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.874868 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/73955741-20a9-4a15-808b-c72dafba6dce-os-release\") pod \"multus-additional-cni-plugins-bpjxc\" (UID: \"73955741-20a9-4a15-808b-c72dafba6dce\") " pod="openshift-multus/multus-additional-cni-plugins-bpjxc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.874873 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/dd95c042-30cb-438f-8e98-9aebe3ea93bc-cnibin\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.874910 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/dd95c042-30cb-438f-8e98-9aebe3ea93bc-host-run-k8s-cni-cncf-io\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.874917 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/dd95c042-30cb-438f-8e98-9aebe3ea93bc-cnibin\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.874929 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/dd95c042-30cb-438f-8e98-9aebe3ea93bc-etc-kubernetes\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.874946 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/6cdfecf8-95cf-4c2b-b98f-eb7bb055771d-rootfs\") pod \"machine-config-daemon-kgd9w\" (UID: \"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d\") " pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.874965 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/dd95c042-30cb-438f-8e98-9aebe3ea93bc-multus-socket-dir-parent\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.874990 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/dd95c042-30cb-438f-8e98-9aebe3ea93bc-multus-cni-dir\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.875006 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/dd95c042-30cb-438f-8e98-9aebe3ea93bc-host-run-netns\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.875025 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/73955741-20a9-4a15-808b-c72dafba6dce-cni-binary-copy\") pod \"multus-additional-cni-plugins-bpjxc\" (UID: \"73955741-20a9-4a15-808b-c72dafba6dce\") " pod="openshift-multus/multus-additional-cni-plugins-bpjxc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.875044 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/dd95c042-30cb-438f-8e98-9aebe3ea93bc-host-run-netns\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.875067 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/6cdfecf8-95cf-4c2b-b98f-eb7bb055771d-rootfs\") pod \"machine-config-daemon-kgd9w\" (UID: \"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d\") " pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.875074 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/dd95c042-30cb-438f-8e98-9aebe3ea93bc-etc-kubernetes\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.875109 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/dd95c042-30cb-438f-8e98-9aebe3ea93bc-multus-socket-dir-parent\") pod \"multus-49gdc\" (UID: 
\"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.875158 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/73955741-20a9-4a15-808b-c72dafba6dce-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-bpjxc\" (UID: \"73955741-20a9-4a15-808b-c72dafba6dce\") " pod="openshift-multus/multus-additional-cni-plugins-bpjxc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.875289 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/dd95c042-30cb-438f-8e98-9aebe3ea93bc-multus-cni-dir\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.875327 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/dd95c042-30cb-438f-8e98-9aebe3ea93bc-host-run-k8s-cni-cncf-io\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.875326 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/73955741-20a9-4a15-808b-c72dafba6dce-tuning-conf-dir\") pod \"multus-additional-cni-plugins-bpjxc\" (UID: \"73955741-20a9-4a15-808b-c72dafba6dce\") " pod="openshift-multus/multus-additional-cni-plugins-bpjxc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.875377 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/dd95c042-30cb-438f-8e98-9aebe3ea93bc-multus-conf-dir\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.875417 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/dd95c042-30cb-438f-8e98-9aebe3ea93bc-os-release\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.875444 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/dd95c042-30cb-438f-8e98-9aebe3ea93bc-hostroot\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.875469 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/dd95c042-30cb-438f-8e98-9aebe3ea93bc-host-var-lib-cni-bin\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.875481 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/73955741-20a9-4a15-808b-c72dafba6dce-cnibin\") pod \"multus-additional-cni-plugins-bpjxc\" (UID: \"73955741-20a9-4a15-808b-c72dafba6dce\") " pod="openshift-multus/multus-additional-cni-plugins-bpjxc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.875498 4791 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/dd95c042-30cb-438f-8e98-9aebe3ea93bc-system-cni-dir\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.875816 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/dd95c042-30cb-438f-8e98-9aebe3ea93bc-cni-binary-copy\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.875829 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6cdfecf8-95cf-4c2b-b98f-eb7bb055771d-mcd-auth-proxy-config\") pod \"machine-config-daemon-kgd9w\" (UID: \"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d\") " pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.876040 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/dd95c042-30cb-438f-8e98-9aebe3ea93bc-multus-daemon-config\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.878779 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6cdfecf8-95cf-4c2b-b98f-eb7bb055771d-proxy-tls\") pod \"machine-config-daemon-kgd9w\" (UID: \"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d\") " pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.887353 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.889048 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.894288 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.898077 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lp7z7\" (UniqueName: \"kubernetes.io/projected/73955741-20a9-4a15-808b-c72dafba6dce-kube-api-access-lp7z7\") pod \"multus-additional-cni-plugins-bpjxc\" (UID: \"73955741-20a9-4a15-808b-c72dafba6dce\") " pod="openshift-multus/multus-additional-cni-plugins-bpjxc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.898129 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-94fd8\" (UniqueName: \"kubernetes.io/projected/dd95c042-30cb-438f-8e98-9aebe3ea93bc-kube-api-access-94fd8\") pod \"multus-49gdc\" (UID: \"dd95c042-30cb-438f-8e98-9aebe3ea93bc\") " pod="openshift-multus/multus-49gdc" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.900036 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.901462 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sx4hj\" (UniqueName: \"kubernetes.io/projected/6cdfecf8-95cf-4c2b-b98f-eb7bb055771d-kube-api-access-sx4hj\") pod \"machine-config-daemon-kgd9w\" (UID: \"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d\") " pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.911694 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d7d5b0-bf26-4221-9933-1f2af688750f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4787a6a226550bb0d96b1b8af8059a6d349d659bad8718ef1df246a66b1e0ac2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b454a2e59b4533af556e4f5175b49175c7b4656861477cec02573e09f27b28\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc2
76e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20ffa577a6292eb443cd5673c9d3a3833627cd38e63b153db5627047dc57b8f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad31c2445a3c19bedfc0d319262fdcc11fe2ba317881450f100cd4e67e2355f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153324bc4f35dd06b193de5ef89462ab658a50a1384b9eb6f5b3b5ccf0b2018\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"cont
ainerID\\\":\\\"cri-o://43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.921051 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.927867 4791 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.935506 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.943420 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-np52c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a943687-2f86-4422-854f-ab38b351b8c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j87l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-np52c\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.951955 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-49gdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd95c042-30cb-438f-8e98-9aebe3ea93bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-94fd8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\
\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-49gdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.960686 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.962749 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.971501 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.975805 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.975850 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.975869 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:02 crc 
kubenswrapper[4791]: I1208 21:19:02.975890 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:02 crc kubenswrapper[4791]: E1208 21:19:02.975950 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 08 21:19:02 crc kubenswrapper[4791]: E1208 21:19:02.975981 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 08 21:19:02 crc kubenswrapper[4791]: E1208 21:19:02.975987 4791 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 08 21:19:02 crc kubenswrapper[4791]: E1208 21:19:02.975994 4791 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:19:02 crc kubenswrapper[4791]: E1208 21:19:02.976042 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-08 21:19:03.976029033 +0000 UTC m=+20.674787378 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 08 21:19:02 crc kubenswrapper[4791]: E1208 21:19:02.976040 4791 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 08 21:19:02 crc kubenswrapper[4791]: E1208 21:19:02.976070 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 08 21:19:02 crc kubenswrapper[4791]: E1208 21:19:02.976057 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-08 21:19:03.976051503 +0000 UTC m=+20.674809848 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:19:02 crc kubenswrapper[4791]: E1208 21:19:02.976103 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 08 21:19:02 crc kubenswrapper[4791]: E1208 21:19:02.976126 4791 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:19:02 crc kubenswrapper[4791]: E1208 21:19:02.976133 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-08 21:19:03.976116425 +0000 UTC m=+20.674874770 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 08 21:19:02 crc kubenswrapper[4791]: E1208 21:19:02.976189 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-08 21:19:03.976167876 +0000 UTC m=+20.674926221 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.981014 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:02 crc kubenswrapper[4791]: I1208 21:19:02.988283 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sx4hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sx4hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgd9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.000130 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bpjxc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73955741-20a9-4a15-808b-c72dafba6dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with 
unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceac
count\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bpjxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.011961 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.042351 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.050298 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-np52c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a943687-2f86-4422-854f-ab38b351b8c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j87l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-np52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.054296 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.059999 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-49gdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd95c042-30cb-438f-8e98-9aebe3ea93bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-94fd8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-49gdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.062558 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.068304 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-49gdc" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.071599 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d7d5b0-bf26-4221-9933-1f2af688750f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4787a6a226550bb0d96b1b8af8059a6d349d659bad8718ef1df246a66b1e0ac2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b454a2e59b4533af556e4f5175b49175c7b4656861477cec02573e09f27b28\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20ffa577a6292eb443cd5673c9d3a3833627cd38e63b153db5627047dc57b8f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad31c2445a3c19bedfc0d319262fdcc11fe2ba317881450f100cd4e67e2355f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153324bc4f35dd06b193de5ef89462ab658a50a1384b9eb6f5b3b5ccf0b2018\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.078420 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.083663 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.085458 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-bpjxc" Dec 08 21:19:03 crc kubenswrapper[4791]: W1208 21:19:03.090339 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6cdfecf8_95cf_4c2b_b98f_eb7bb055771d.slice/crio-aba87e0829954d125df4a593aef8c3c8a369a63b3c733cf2624305818925e969 WatchSource:0}: Error finding container aba87e0829954d125df4a593aef8c3c8a369a63b3c733cf2624305818925e969: Status 404 returned error can't find the container with id aba87e0829954d125df4a593aef8c3c8a369a63b3c733cf2624305818925e969 Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.094182 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:03 crc kubenswrapper[4791]: W1208 21:19:03.100130 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod73955741_20a9_4a15_808b_c72dafba6dce.slice/crio-54f158be3b0036c6b349af677f2552758b3f85ac2669f54dc70e30a4c7b45b08 WatchSource:0}: Error finding container 54f158be3b0036c6b349af677f2552758b3f85ac2669f54dc70e30a4c7b45b08: Status 404 returned error can't find the container with id 54f158be3b0036c6b349af677f2552758b3f85ac2669f54dc70e30a4c7b45b08 Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.107910 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bpjxc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73955741-20a9-4a15-808b-c72dafba6dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bpjxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.116336 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-dk8tz"] Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.117230 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.118703 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.119697 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.119724 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.119761 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.119902 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.119908 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.119953 4791 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.119982 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.149077 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.179231 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-env-overrides\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.179268 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dfx26\" (UniqueName: \"kubernetes.io/projected/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-kube-api-access-dfx26\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.179298 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-run-openvswitch\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.179313 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-node-log\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.179330 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-ovn-node-metrics-cert\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.179347 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-log-socket\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.179367 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-run-ovn\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.179383 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-host-kubelet\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.179410 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-ovnkube-script-lib\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.179427 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-etc-openvswitch\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.179441 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-host-cni-netd\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.179464 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-host-run-netns\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.179479 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-var-lib-openvswitch\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.179496 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.179517 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-host-run-ovn-kubernetes\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.179532 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-run-systemd\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.179546 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-ovnkube-config\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.179586 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-systemd-units\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.179602 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-host-slash\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.179633 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-host-cni-bin\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.188640 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.227127 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sx4hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sx4hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgd9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.243287 4791 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-12-08 21:14:02 +0000 UTC, rotation deadline is 2026-09-18 19:50:09.151050675 +0000 UTC Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.243342 4791 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 6814h31m5.907711004s for next certificate rotation Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.274173 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a515231e-0c05-47c4-8731-6fdc0792b2c0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c27269d9eb25f7d681a2c653a09d62264d8fc2cd8500eb2c2037d1ef512bb852\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1bfa0139619e85c94a473c25d6f7163c2bd700b172b828d7decfc3a14baf4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52d9c79fb2719ba7607d76daa944d1b52fe2bf20792f2dc24df762372b7c25ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f95e8c46d330a46bccff4299a2cc5d753d2371a1889cddbcdc70f7da571915a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.280282 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-env-overrides\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.280317 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dfx26\" (UniqueName: \"kubernetes.io/projected/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-kube-api-access-dfx26\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.280342 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-run-openvswitch\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.280357 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-node-log\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.280375 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-ovn-node-metrics-cert\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.280426 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-log-socket\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.280444 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-run-ovn\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.280464 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-host-kubelet\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.280480 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-ovnkube-script-lib\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.280501 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-etc-openvswitch\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.280515 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-host-cni-netd\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.280533 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.280558 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-host-run-netns\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.280573 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-var-lib-openvswitch\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.280586 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-host-run-ovn-kubernetes\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.280603 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-run-systemd\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.280619 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-ovnkube-config\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.280659 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-systemd-units\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.280674 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-host-slash\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.280688 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-host-cni-bin\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.280756 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-host-cni-bin\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.280791 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-host-cni-netd\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.280791 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-etc-openvswitch\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.280813 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 
21:19:03.280837 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-host-run-netns\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.280859 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-var-lib-openvswitch\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.280881 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-host-run-ovn-kubernetes\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.280906 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-run-systemd\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.281510 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-env-overrides\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.281563 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-log-socket\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.281649 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-host-kubelet\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.281723 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-systemd-units\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.281769 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-host-slash\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.281731 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: 
\"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-run-ovn\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.281772 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-ovnkube-config\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.281803 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-node-log\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.281799 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-run-openvswitch\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.282204 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-ovnkube-script-lib\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.287443 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-ovn-node-metrics-cert\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.320190 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dfx26\" (UniqueName: \"kubernetes.io/projected/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-kube-api-access-dfx26\") pod \"ovnkube-node-dk8tz\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.328820 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.374983 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc451ac-cac3-4571-9ef2-8b22b3e20c32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a560d56b37dcaf5f26a1c58f78c02dd0f5fd9b222e696c15474acd25eba3ab2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"contai
nerID\\\":\\\"cri-o://c57e43dc171b23fa1abb6f3baa41f8dfee855b0e85afc8a233c7d9ead4706301\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://870672685b3bbcda0af97fd9af6d3f40fb9c9d57432f0736e64ea96a21ff1b34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://468f39d9ddd6d74f616a2559f2204a65336bbb713c83c64ff511fd4126d30bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac2c3ef5a41d743dba41d84bbe4e89b64d8702b2d4565982be17fae194312abf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d57b4b60d9c35eb18a5976c42d5f38e0c8dfdb9c
04f4e27585f00e57c3f27f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d57b4b60d9c35eb18a5976c42d5f38e0c8dfdb9c04f4e27585f00e57c3f27f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ee716248fcf96836fa6fca32e47c04270b384be440d725e5083bf72fd897122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ee716248fcf96836fa6fca32e47c04270b384be440d725e5083bf72fd897122\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a8047c3513de0746992a77b9867a3a8709bfe81592ef9ca2483a753a0862fefe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8047c3513de0746992a77b9867a3a8709bfe81592ef9ca2483a753a0862fefe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.411381 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d7d5b0-bf26-4221-9933-1f2af688750f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4787a6a226550bb0d96b1b8af8059a6d349d659bad8718ef1df246a66b1e0ac2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b454a2e59b4533af556e4f5175b49175c7b4656861477cec02573e09f27b28\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20ffa577a6292eb443cd5673c9d3a3833627cd38e63b153db5627047dc57b8f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad31c2445a3c19bedfc0d319262fdcc11fe2ba317881450f100cd4e67e2355f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153324bc4f35dd06b193de5ef89462ab658a50a1384b9eb6f5b3b5ccf0b2018\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.447878 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.447981 4791 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Dec 08 21:19:03 crc kubenswrapper[4791]: W1208 21:19:03.448190 4791 reflector.go:484] object-"openshift-machine-config-operator"/"proxy-tls": watch of *v1.Secret ended with: very short watch: object-"openshift-machine-config-operator"/"proxy-tls": Unexpected watch close - watch lasted less than a second and no items received Dec 08 21:19:03 crc kubenswrapper[4791]: W1208 21:19:03.448428 4791 reflector.go:484] object-"openshift-ovn-kubernetes"/"kube-root-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-ovn-kubernetes"/"kube-root-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 08 21:19:03 crc kubenswrapper[4791]: W1208 21:19:03.448497 4791 reflector.go:484] object-"openshift-multus"/"openshift-service-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-multus"/"openshift-service-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 08 21:19:03 crc kubenswrapper[4791]: W1208 21:19:03.448448 4791 reflector.go:484] object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert": watch of *v1.Secret ended with: very short watch: object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert": Unexpected watch close - watch lasted less than a second and no items received Dec 08 21:19:03 crc kubenswrapper[4791]: W1208 21:19:03.448539 4791 reflector.go:484] object-"openshift-multus"/"kube-root-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-multus"/"kube-root-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 08 21:19:03 crc kubenswrapper[4791]: W1208 21:19:03.448582 4791 reflector.go:484] object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq": watch of 
*v1.Secret ended with: very short watch: object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq": Unexpected watch close - watch lasted less than a second and no items received Dec 08 21:19:03 crc kubenswrapper[4791]: W1208 21:19:03.448556 4791 reflector.go:484] object-"openshift-multus"/"default-cni-sysctl-allowlist": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-multus"/"default-cni-sysctl-allowlist": Unexpected watch close - watch lasted less than a second and no items received Dec 08 21:19:03 crc kubenswrapper[4791]: W1208 21:19:03.448615 4791 reflector.go:484] object-"openshift-machine-config-operator"/"openshift-service-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-machine-config-operator"/"openshift-service-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 08 21:19:03 crc kubenswrapper[4791]: W1208 21:19:03.448635 4791 reflector.go:484] object-"openshift-multus"/"multus-daemon-config": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-multus"/"multus-daemon-config": Unexpected watch close - watch lasted less than a second and no items received Dec 08 21:19:03 crc kubenswrapper[4791]: W1208 21:19:03.448648 4791 reflector.go:484] object-"openshift-multus"/"cni-copy-resources": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-multus"/"cni-copy-resources": Unexpected watch close - watch lasted less than a second and no items received Dec 08 21:19:03 crc kubenswrapper[4791]: W1208 21:19:03.448783 4791 reflector.go:484] object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz": watch of *v1.Secret ended with: very short watch: object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz": Unexpected watch close - watch lasted less than a second and no items received Dec 08 21:19:03 crc kubenswrapper[4791]: W1208 21:19:03.448805 4791 reflector.go:484] object-"openshift-machine-config-operator"/"kube-root-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-machine-config-operator"/"kube-root-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 08 21:19:03 crc kubenswrapper[4791]: W1208 21:19:03.448908 4791 reflector.go:484] object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 08 21:19:03 crc kubenswrapper[4791]: W1208 21:19:03.448986 4791 reflector.go:484] k8s.io/client-go/informers/factory.go:160: watch of *v1.Service ended with: very short watch: k8s.io/client-go/informers/factory.go:160: Unexpected watch close - watch lasted less than a second and no items received Dec 08 21:19:03 crc kubenswrapper[4791]: W1208 21:19:03.448989 4791 reflector.go:484] object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl": watch of *v1.Secret ended with: very short watch: object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl": Unexpected watch close - watch lasted less than a second and no items received Dec 08 21:19:03 crc kubenswrapper[4791]: W1208 21:19:03.449011 4791 reflector.go:484] object-"openshift-ovn-kubernetes"/"ovnkube-config": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-ovn-kubernetes"/"ovnkube-config": Unexpected watch close - watch lasted less than a second and no items received 
Dec 08 21:19:03 crc kubenswrapper[4791]: W1208 21:19:03.449030 4791 reflector.go:484] object-"openshift-machine-config-operator"/"kube-rbac-proxy": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-machine-config-operator"/"kube-rbac-proxy": Unexpected watch close - watch lasted less than a second and no items received Dec 08 21:19:03 crc kubenswrapper[4791]: W1208 21:19:03.449041 4791 reflector.go:484] object-"openshift-ovn-kubernetes"/"ovnkube-script-lib": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-ovn-kubernetes"/"ovnkube-script-lib": Unexpected watch close - watch lasted less than a second and no items received Dec 08 21:19:03 crc kubenswrapper[4791]: W1208 21:19:03.449150 4791 reflector.go:484] object-"openshift-ovn-kubernetes"/"env-overrides": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-ovn-kubernetes"/"env-overrides": Unexpected watch close - watch lasted less than a second and no items received Dec 08 21:19:03 crc kubenswrapper[4791]: W1208 21:19:03.449170 4791 reflector.go:484] object-"openshift-multus"/"default-dockercfg-2q5b6": watch of *v1.Secret ended with: very short watch: object-"openshift-multus"/"default-dockercfg-2q5b6": Unexpected watch close - watch lasted less than a second and no items received Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.459493 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.489907 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:03 crc kubenswrapper[4791]: W1208 21:19:03.502747 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0b3968ef_9912_4b4e_bb09_95ab9d9c19c5.slice/crio-c60211e1d44c665738cfe3a046e7c78aa331225d6c7c26c2f3e1043dc2e3d3c2 WatchSource:0}: Error finding container c60211e1d44c665738cfe3a046e7c78aa331225d6c7c26c2f3e1043dc2e3d3c2: Status 404 returned error can't find the container with id c60211e1d44c665738cfe3a046e7c78aa331225d6c7c26c2f3e1043dc2e3d3c2 Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.526889 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-np52c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a943687-2f86-4422-854f-ab38b351b8c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct 
envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j87l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-np52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.568876 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-49gdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd95c042-30cb-438f-8e98-9aebe3ea93bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-94fd8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-49gdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.596824 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.596865 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.596854 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:03 crc kubenswrapper[4791]: E1208 21:19:03.596965 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:19:03 crc kubenswrapper[4791]: E1208 21:19:03.597056 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:19:03 crc kubenswrapper[4791]: E1208 21:19:03.597156 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.603451 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.604189 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.604940 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.605596 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.607630 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.608238 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.608853 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.609868 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 08 21:19:03 
crc kubenswrapper[4791]: I1208 21:19:03.610498 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.611766 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.612329 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.613226 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.613931 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.616311 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.616960 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.618129 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.618788 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.619472 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.620301 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.620972 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.621566 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.622569 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.623347 4791 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.624263 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.624980 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.625441 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.627323 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.628349 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.628859 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.629777 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.630282 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.631157 4791 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.631256 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.632956 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.634025 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.634474 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.636193 4791 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.637426 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.637990 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.638977 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.639629 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.640151 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.644001 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.645084 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.645684 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.646655 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.647200 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.648335 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.649088 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.650890 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.653021 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.653544 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.654103 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.655219 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.655833 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.656926 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.691805 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.729619 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sx4hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sx4hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgd9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.765324 4791 generic.go:334] "Generic (PLEG): container finished" podID="0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" containerID="7938544ef5c904b0a752cb91848ba48897df503bee8c19a431137328620c351e" exitCode=0 Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.765403 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" event={"ID":"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5","Type":"ContainerDied","Data":"7938544ef5c904b0a752cb91848ba48897df503bee8c19a431137328620c351e"} Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.765450 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" event={"ID":"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5","Type":"ContainerStarted","Data":"c60211e1d44c665738cfe3a046e7c78aa331225d6c7c26c2f3e1043dc2e3d3c2"} Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.767239 4791 generic.go:334] "Generic (PLEG): container finished" podID="73955741-20a9-4a15-808b-c72dafba6dce" containerID="f928dbb347322a7929cb5102328b4879bc66bb53d9eacc0dc05eda071a6b66f4" exitCode=0 Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.767272 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-bpjxc" event={"ID":"73955741-20a9-4a15-808b-c72dafba6dce","Type":"ContainerDied","Data":"f928dbb347322a7929cb5102328b4879bc66bb53d9eacc0dc05eda071a6b66f4"} Dec 08 
21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.767309 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-bpjxc" event={"ID":"73955741-20a9-4a15-808b-c72dafba6dce","Type":"ContainerStarted","Data":"54f158be3b0036c6b349af677f2552758b3f85ac2669f54dc70e30a4c7b45b08"} Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.769585 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" event={"ID":"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d","Type":"ContainerStarted","Data":"a898dfe85305f9e755a0fce9f420825f50668f1491e9b6da752649c02746ad3e"} Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.769652 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" event={"ID":"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d","Type":"ContainerStarted","Data":"7c1cbce001d702f40825e1e2c6c77ce54111ab2097acbc1cc3d365fd3d7b06cc"} Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.769665 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" event={"ID":"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d","Type":"ContainerStarted","Data":"aba87e0829954d125df4a593aef8c3c8a369a63b3c733cf2624305818925e969"} Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.772958 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bpjxc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73955741-20a9-4a15-808b-c72dafba6dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bpjxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.784848 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-49gdc" event={"ID":"dd95c042-30cb-438f-8e98-9aebe3ea93bc","Type":"ContainerStarted","Data":"4f25965cfe0a07212e4d159d38b276988beeef898ceacdffa62335cf03e0eb55"} Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.784916 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-49gdc" 
event={"ID":"dd95c042-30cb-438f-8e98-9aebe3ea93bc","Type":"ContainerStarted","Data":"924dec9a6fcb4835931750472f055f28e43bfe9bbdc8568ea2c3e4c4134abaca"} Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.789279 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"680ab83b9fdac646142a18cc76d8f8da28df3ece3a9c0d249499636f28f7f549"} Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.868083 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dk8tz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.890072 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:03 crc kubenswrapper[4791]: E1208 21:19:03.890593 4791 nestedpendingoperations.go:348] 
Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:05.890559911 +0000 UTC m=+22.589318296 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.890795 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a515231e-0c05-47c4-8731-6fdc0792b2c0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c27269d9eb25f7d681a2c653a09d62264d8fc2cd8500eb2c2037d1ef512bb852\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1bfa0139619e85c94a473c25d6f7163c2bd700b172b828d7decfc3a14baf4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52d9c79fb2719ba7607d76daa944d1b52fe2bf20792f2dc24df
762372b7c25ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f95e8c46d330a46bccff4299a2cc5d753d2371a1889cddbcdc70f7da571915a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.921579 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.958420 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a515231e-0c05-47c4-8731-6fdc0792b2c0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c27269d9eb25f7d681a2c653a09d62264d8fc2cd8500eb2c2037d1ef512bb852\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1bfa0139619e85c94a473c25d6f7163c2bd700b172b828d7decfc3a14baf4b4\\\",\\\"image\\\"
:\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52d9c79fb2719ba7607d76daa944d1b52fe2bf20792f2dc24df762372b7c25ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f95e8c46d330a46bccff4299a2cc5d753d2371a1889cddbcdc70f7da571915a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.969565 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.990949 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.991208 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.991316 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: 
\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:03 crc kubenswrapper[4791]: I1208 21:19:03.991495 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:03 crc kubenswrapper[4791]: E1208 21:19:03.991151 4791 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 08 21:19:03 crc kubenswrapper[4791]: E1208 21:19:03.991651 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-08 21:19:05.991629875 +0000 UTC m=+22.690388260 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 08 21:19:03 crc kubenswrapper[4791]: E1208 21:19:03.991565 4791 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 08 21:19:03 crc kubenswrapper[4791]: E1208 21:19:03.991870 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-08 21:19:05.99183697 +0000 UTC m=+22.690595315 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 08 21:19:03 crc kubenswrapper[4791]: E1208 21:19:03.991873 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 08 21:19:03 crc kubenswrapper[4791]: E1208 21:19:03.992005 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 08 21:19:03 crc kubenswrapper[4791]: E1208 21:19:03.992060 4791 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:19:03 crc kubenswrapper[4791]: E1208 21:19:03.992150 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-08 21:19:05.992135287 +0000 UTC m=+22.690893632 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:19:03 crc kubenswrapper[4791]: E1208 21:19:03.992011 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 08 21:19:03 crc kubenswrapper[4791]: E1208 21:19:03.992270 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 08 21:19:03 crc kubenswrapper[4791]: E1208 21:19:03.992289 4791 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:19:03 crc kubenswrapper[4791]: E1208 21:19:03.992326 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-08 21:19:05.992317081 +0000 UTC m=+22.691075426 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:19:04 crc kubenswrapper[4791]: I1208 21:19:04.018663 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc451ac-cac3-4571-9ef2-8b22b3e20c32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a560d56b37dcaf5f26a1c58f78c02dd0f5fd9b222e696c15474acd25eba3ab2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c57e43dc171b23fa1abb6f3baa41f8dfee855b0e85afc8a233c7d9ead4706301\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://870672685b3bbcda0af97fd9af6d3f40fb9c9d57432f0736e64ea96a21ff1b34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\
\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://468f39d9ddd6d74f616a2559f2204a65336bbb713c83c64ff511fd4126d30bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac2c3ef5a41d743dba41d84bbe4e89b64d8702b2d4565982be17fae194312abf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d57b4b60d9c35eb18a5976c42d5f38e0c8dfdb9c04f4e27585f00e57c3f27f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d57b4b60d9c35eb18a5976c42d5f38e0c8dfdb9c04f4e27585f00e57c3f27f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ee716248fcf96836fa6fca32e47c04270b384be440d725e5083bf72fd897122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ee716248fcf96836fa6fca32e47c04270b384be440d725e5083bf72fd897122\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a8047c3513de0746992a77b9867a3a8709bfe81592ef9ca2483a753a0862fefe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8047c3513de0746992a77b9867a3a8709bfe81592ef9ca2483a753a0862fefe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:04 crc kubenswrapper[4791]: I1208 21:19:04.048073 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d7d5b0-bf26-4221-9933-1f2af688750f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4787a6a226550bb0d96b1b8af8059a6d349d659bad8718ef1df246a66b1e0ac2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b454a2e59b4533af556e4f5175b49175c7b4656861477cec02573e09f27b28\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20ffa577a6292eb443cd5673c9d3a3833627cd38e63b153db5627047dc57b8f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad31c2445a3c19bedfc0d319262fdcc11fe2ba317881450f100cd4e67e2355f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153324bc4f35dd06b193de5ef89462ab658a50a1384b9eb6f5b3b5ccf0b2018\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:04 crc kubenswrapper[4791]: I1208 21:19:04.088205 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:04 crc kubenswrapper[4791]: I1208 21:19:04.127214 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:04 crc kubenswrapper[4791]: I1208 21:19:04.165293 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-np52c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a943687-2f86-4422-854f-ab38b351b8c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j87l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-np52c\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:04 crc kubenswrapper[4791]: I1208 21:19:04.208212 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-49gdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd95c042-30cb-438f-8e98-9aebe3ea93bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f25965cfe0a07212e4d159d38b276988beeef898ceacdffa62335cf03e0eb55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-94fd8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-49gdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:04 crc kubenswrapper[4791]: I1208 21:19:04.247030 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:04 crc kubenswrapper[4791]: I1208 21:19:04.287703 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:04 crc kubenswrapper[4791]: I1208 21:19:04.328632 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:04 crc kubenswrapper[4791]: I1208 21:19:04.361602 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 08 21:19:04 crc kubenswrapper[4791]: I1208 21:19:04.382478 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 08 21:19:04 crc kubenswrapper[4791]: I1208 21:19:04.408514 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a898dfe85305f9e755a0fce9f420825f50668f1491e9b6da752649c02746ad3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sx4hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c1cbce001d702f40825e1e2c6c77ce54111ab2097acbc1cc3d365fd3d7b06cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e
911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sx4hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgd9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:04 crc kubenswrapper[4791]: I1208 21:19:04.422438 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 08 21:19:04 crc kubenswrapper[4791]: I1208 21:19:04.442545 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 08 21:19:04 crc kubenswrapper[4791]: I1208 21:19:04.488914 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bpjxc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73955741-20a9-4a15-808b-c72dafba6dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f928dbb347322a7929cb5102328b4879bc66bb53d9eacc0dc05eda071a6b66f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f928dbb347322a7929cb5102328b4879bc66bb53d9eacc0dc05eda071a6b66f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bpjxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:04 crc kubenswrapper[4791]: I1208 21:19:04.501942 4791 reflector.go:368] Caches populated for *v1.Secret 
from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 08 21:19:04 crc kubenswrapper[4791]: I1208 21:19:04.556584 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\
\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"
kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7938544ef5
c904b0a752cb91848ba48897df503bee8c19a431137328620c351e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7938544ef5c904b0a752cb91848ba48897df503bee8c19a431137328620c351e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dk8tz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:04 crc kubenswrapper[4791]: I1208 21:19:04.562484 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 08 21:19:04 crc kubenswrapper[4791]: I1208 21:19:04.582546 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 08 21:19:04 crc kubenswrapper[4791]: I1208 21:19:04.623205 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 08 21:19:04 crc kubenswrapper[4791]: I1208 21:19:04.649178 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a898dfe85305f9e755a0fce9f420825f50668f1491e9b6da752649c02746ad3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sx4hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c1cbce001d702f40825e1e2c6c77ce54111ab2097acbc1cc3d365fd3d7b06cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sx4hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgd9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:04 crc kubenswrapper[4791]: I1208 21:19:04.662897 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 08 21:19:04 
crc kubenswrapper[4791]: I1208 21:19:04.701913 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 08 21:19:04 crc kubenswrapper[4791]: I1208 21:19:04.731609 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bpjxc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73955741-20a9-4a15-808b-c72dafba6dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f928dbb347322a7929cb5102328b4879bc66bb53d9eacc0dc05eda071a6b66f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f928dbb347322a7929cb5102328b4879bc66bb53d9eacc0dc05eda071a6b66f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",
\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bpjxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:04 crc kubenswrapper[4791]: I1208 21:19:04.742532 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 08 21:19:04 crc kubenswrapper[4791]: I1208 21:19:04.782278 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 08 21:19:04 crc kubenswrapper[4791]: I1208 21:19:04.793623 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-bpjxc" event={"ID":"73955741-20a9-4a15-808b-c72dafba6dce","Type":"ContainerStarted","Data":"1055c8dd1dd1126bb9e1ee83fdca69cdfa1052be3d1674d53dec324bb08bcc1e"} Dec 08 21:19:04 crc kubenswrapper[4791]: I1208 21:19:04.800567 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"0a8f87b8159488138c04574a665b2ae3f16031f6fc7791117aace5c6d4111b2e"} Dec 08 21:19:04 crc kubenswrapper[4791]: I1208 21:19:04.802146 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" event={"ID":"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5","Type":"ContainerStarted","Data":"1ea79a956f5bc31b744c699f976d260048fa6d0c69e5d96cc3e832c697a796ce"} Dec 08 21:19:04 crc kubenswrapper[4791]: I1208 21:19:04.813080 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7938544ef5c904b0a752cb91848ba48897df503bee8c19a431137328620c351e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7938544ef5c904b0a752cb91848ba48897df503bee8c19a431137328620c351e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dk8tz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:04 crc kubenswrapper[4791]: I1208 21:19:04.822244 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 08 21:19:04 crc kubenswrapper[4791]: I1208 21:19:04.864624 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 08 21:19:04 crc kubenswrapper[4791]: I1208 21:19:04.882977 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 08 21:19:04 crc kubenswrapper[4791]: I1208 21:19:04.910981 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:04 crc kubenswrapper[4791]: I1208 21:19:04.921411 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 08 21:19:04 crc kubenswrapper[4791]: I1208 21:19:04.969838 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:05 crc kubenswrapper[4791]: I1208 21:19:05.008727 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:05 crc kubenswrapper[4791]: I1208 21:19:05.041420 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 08 21:19:05 crc kubenswrapper[4791]: I1208 21:19:05.070599 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a515231e-0c05-47c4-8731-6fdc0792b2c0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c27269d9eb25f7d681a2c653a09d62264d8fc2cd8500eb2c2037d1ef512bb852\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1bfa0139619e85c94a473c25d6f7163c2bd700b172b828d7decfc3a14baf4b4\\\",\\\"image\\\":\\\"quay.io/openshi
ft-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52d9c79fb2719ba7607d76daa944d1b52fe2bf20792f2dc24df762372b7c25ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f95e8c46d330a46bccff4299a2cc5d753d2371a1889cddbcdc70f7da571915a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:05 crc kubenswrapper[4791]: I1208 21:19:05.081772 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 08 21:19:05 crc kubenswrapper[4791]: I1208 21:19:05.102457 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 08 21:19:05 crc kubenswrapper[4791]: I1208 21:19:05.149593 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:05 crc kubenswrapper[4791]: I1208 21:19:05.189026 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:05 crc kubenswrapper[4791]: I1208 21:19:05.226078 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-np52c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a943687-2f86-4422-854f-ab38b351b8c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct 
envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j87l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-np52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:05 crc kubenswrapper[4791]: I1208 21:19:05.270200 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-49gdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd95c042-30cb-438f-8e98-9aebe3ea93bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f25965cfe0a07212e4d159d38b276988beeef898ceacdffa62335cf03e0eb55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\
\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-94fd8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-49gdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:05 crc kubenswrapper[4791]: I1208 21:19:05.314476 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc451ac-cac3-4571-9ef2-8b22b3e20c32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a560d56b37dcaf5f26a1c58f78c02dd0f5fd9b222e696c15474acd25eba3ab2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c57e43dc171b23fa1abb6f3baa41f8dfee855b0e85afc8a233c7d9ead4706301\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e
9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://870672685b3bbcda0af97fd9af6d3f40fb9c9d57432f0736e64ea96a21ff1b34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://468f39d9ddd6d74f616a2559f2204a65336bbb713c83c64ff511fd4126d30bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac2c3ef5a41d743dba41d84bbe4e89b64d8702b2d4565982be17fae194312abf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d57b4b60d9c35eb18a5976c42d5f38e0c8dfdb9c04f4e27585f00e57c3f27f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\
\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d57b4b60d9c35eb18a5976c42d5f38e0c8dfdb9c04f4e27585f00e57c3f27f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ee716248fcf96836fa6fca32e47c04270b384be440d725e5083bf72fd897122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ee716248fcf96836fa6fca32e47c04270b384be440d725e5083bf72fd897122\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a8047c3513de0746992a77b9867a3a8709bfe81592ef9ca2483a753a0862fefe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8047c3513de0746992a77b9867a3a8709bfe81592ef9ca2483a753a0862fefe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:05 crc kubenswrapper[4791]: I1208 21:19:05.349396 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d7d5b0-bf26-4221-9933-1f2af688750f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4787a6a226550bb0d96b1b8af8059a6d349d659bad8718ef1df246a66b1e0ac2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b454a2e59b4533af556e4f5175b49175c7b4656861477cec02573e09f27b28\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20ffa577a6292eb443cd5673c9d3a3833627cd38e63b153db5627047dc57b8f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad31c2445a3c19bedfc0d319262fdcc11fe2ba317881450f100cd4e67e2355f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153324bc4f35dd06b193de5ef89462ab658a50a1384b9eb6f5b3b5ccf0b2018\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:05 crc kubenswrapper[4791]: I1208 21:19:05.386700 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:05 crc kubenswrapper[4791]: I1208 21:19:05.427027 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:05 crc kubenswrapper[4791]: I1208 21:19:05.466865 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:05 crc kubenswrapper[4791]: I1208 21:19:05.507634 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a898dfe85305f9e755a0fce9f420825f50668f1491e9b6da752649c02746ad3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sx4hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c1cbce001d702f40825e1e2c6c77ce54111ab2097ac
bc1cc3d365fd3d7b06cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sx4hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgd9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:05 crc kubenswrapper[4791]: I1208 21:19:05.548929 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bpjxc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73955741-20a9-4a15-808b-c72dafba6dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f928dbb347322a7929cb5102328b4879bc66bb53d9eacc0dc05eda071a6b66f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f928dbb347322a7929cb5102328b4879bc66bb53d9eacc0dc05eda071a6b66f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1055c8dd1dd1126bb9e1ee83fdca69cdfa1052be3d1674d53dec324bb08bcc1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64
b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bpjxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:05 crc kubenswrapper[4791]: I1208 21:19:05.596946 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:05 crc kubenswrapper[4791]: I1208 21:19:05.596951 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:05 crc kubenswrapper[4791]: E1208 21:19:05.597089 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:19:05 crc kubenswrapper[4791]: E1208 21:19:05.597218 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:19:05 crc kubenswrapper[4791]: I1208 21:19:05.597491 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:05 crc kubenswrapper[4791]: E1208 21:19:05.597578 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:19:05 crc kubenswrapper[4791]: I1208 21:19:05.601813 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"
name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuberne
tes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[
{\\\"containerID\\\":\\\"cri-o://7938544ef5c904b0a752cb91848ba48897df503bee8c19a431137328620c351e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7938544ef5c904b0a752cb91848ba48897df503bee8c19a431137328620c351e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dk8tz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:05 crc kubenswrapper[4791]: I1208 21:19:05.628872 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:05 crc kubenswrapper[4791]: I1208 21:19:05.666523 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a515231e-0c05-47c4-8731-6fdc0792b2c0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c27269d9eb25f7d681a2c653a09d62264d8fc2cd8500eb2c2037d1ef512bb852\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1bfa0139619e85c94a473c25d6f7163c2bd700b172b828d7decfc3a14baf4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-po
d-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52d9c79fb2719ba7607d76daa944d1b52fe2bf20792f2dc24df762372b7c25ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f95e8c46d330a46bccff4299a2cc5d753d2371a1889cddbcdc70f7da571915a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:05 crc kubenswrapper[4791]: I1208 21:19:05.706458 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a8f87b8159488138c04574a665b2ae3f16031f6fc7791117aace5c6d4111b2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:05 crc kubenswrapper[4791]: I1208 21:19:05.751680 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d7d5b0-bf26-4221-9933-1f2af688750f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4787a6a226550bb0d96b1b8af8059a6d349d659bad8718ef1df246a66b1e0ac2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b454a2e59b4533af556e4f5175b49175c7b4656861477cec02573e09f27b28\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20ffa577a6292eb443cd5673c9d3a3833627cd38e63b153db5627047dc57b8f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad31c2445a3c19bedfc0d319262fdcc11fe2ba317881450f100cd4e67e2355f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153324bc4f35dd06b193de5ef89462ab658a50a1384b9eb6f5b3b5ccf0b2018\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:05 crc kubenswrapper[4791]: I1208 21:19:05.789939 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:05 crc kubenswrapper[4791]: I1208 21:19:05.810738 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" event={"ID":"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5","Type":"ContainerStarted","Data":"1fe5fcb37182b9302ac277ae161589f867ee9126114052e88a82048e319595c6"} Dec 08 21:19:05 crc kubenswrapper[4791]: I1208 21:19:05.810797 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" event={"ID":"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5","Type":"ContainerStarted","Data":"32b83e290f40edffd1d7679ef280a32bff978b35e9b0fcd1f179d3707021ba3a"} Dec 08 21:19:05 crc kubenswrapper[4791]: I1208 21:19:05.810812 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" event={"ID":"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5","Type":"ContainerStarted","Data":"22cca113e278abf9667c373147d1bf61b951ae07e7e1046e13ebf28aa0355007"} Dec 08 21:19:05 crc kubenswrapper[4791]: I1208 21:19:05.810821 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" event={"ID":"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5","Type":"ContainerStarted","Data":"92ac0f9db30045f8ac6e2997ecb935634f31f5ee0cbb2da74e87c344df3ed125"} Dec 08 21:19:05 crc kubenswrapper[4791]: I1208 21:19:05.810837 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" event={"ID":"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5","Type":"ContainerStarted","Data":"37c2a697912d045e363b17699528d1f17acd50d3a0883e40b9023d72f3cf5e56"} Dec 08 21:19:05 crc kubenswrapper[4791]: I1208 21:19:05.812680 4791 generic.go:334] "Generic (PLEG): container finished" podID="73955741-20a9-4a15-808b-c72dafba6dce" containerID="1055c8dd1dd1126bb9e1ee83fdca69cdfa1052be3d1674d53dec324bb08bcc1e" exitCode=0 Dec 08 21:19:05 crc kubenswrapper[4791]: I1208 21:19:05.812726 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-multus/multus-additional-cni-plugins-bpjxc" event={"ID":"73955741-20a9-4a15-808b-c72dafba6dce","Type":"ContainerDied","Data":"1055c8dd1dd1126bb9e1ee83fdca69cdfa1052be3d1674d53dec324bb08bcc1e"} Dec 08 21:19:05 crc kubenswrapper[4791]: I1208 21:19:05.828040 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:05 crc kubenswrapper[4791]: I1208 21:19:05.865974 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-np52c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a943687-2f86-4422-854f-ab38b351b8c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j87l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-np52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:05 crc kubenswrapper[4791]: I1208 21:19:05.908534 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:05 crc kubenswrapper[4791]: E1208 21:19:05.908815 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:09.90878366 +0000 UTC m=+26.607542005 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:05 crc kubenswrapper[4791]: I1208 21:19:05.911024 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-49gdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd95c042-30cb-438f-8e98-9aebe3ea93bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f25965cfe0a07212e4d159d38b276988beeef898ceacdffa62335cf03e0eb55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\
"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-94fd8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-49gdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:05 crc kubenswrapper[4791]: I1208 21:19:05.953554 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc451ac-cac3-4571-9ef2-8b22b3e20c32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a560d56b37dcaf5f26a1c58f78c02dd0f5fd9b222e696c15474acd25eba3ab2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c57e43dc171b23fa1abb6f3baa41f8dfee855b0e85afc8a233c7d9ead4706301\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernet
es/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://870672685b3bbcda0af97fd9af6d3f40fb9c9d57432f0736e64ea96a21ff1b34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://468f39d9ddd6d74f616a2559f2204a65336bbb713c83c64ff511fd4126d30bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac2c3ef5a41d743dba41d84bbe4e89b64d8702b2d4565982be17fae194312abf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d57b4b60d9c35eb18a5976c42d5f38e0c8dfdb9c04f4e27585f00e57c3f27f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d57b4b60d9c35eb18a5976c42d5f38e0c8dfdb9c04f4e27585f00e57c3f27f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[
{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ee716248fcf96836fa6fca32e47c04270b384be440d725e5083bf72fd897122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ee716248fcf96836fa6fca32e47c04270b384be440d725e5083bf72fd897122\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a8047c3513de0746992a77b9867a3a8709bfe81592ef9ca2483a753a0862fefe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8047c3513de0746992a77b9867a3a8709bfe81592ef9ca2483a753a0862fefe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:05 crc kubenswrapper[4791]: I1208 21:19:05.986894 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a515231e-0c05-47c4-8731-6fdc0792b2c0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c27269d9eb25f7d681a2c653a09d62264d8fc2cd8500eb2c2037d1ef512bb852\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1bfa0139619e85c94a473c25d6f7163c2bd700b172b828d7decfc3a14baf4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52d9c79fb2719ba7607d76daa944d1b52fe2bf20792f2dc24df762372b7c25ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f95e8c46d330a46bccff4299a2cc5d753d2371a1889cddbcdc70f7da571915a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.009873 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.009924 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.009970 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.010001 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:06 crc kubenswrapper[4791]: E1208 21:19:06.010069 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 08 21:19:06 crc kubenswrapper[4791]: E1208 21:19:06.010086 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 08 21:19:06 crc kubenswrapper[4791]: E1208 21:19:06.010092 4791 projected.go:288] 
Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 08 21:19:06 crc kubenswrapper[4791]: E1208 21:19:06.010097 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 08 21:19:06 crc kubenswrapper[4791]: E1208 21:19:06.010095 4791 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 08 21:19:06 crc kubenswrapper[4791]: E1208 21:19:06.010144 4791 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 08 21:19:06 crc kubenswrapper[4791]: E1208 21:19:06.010106 4791 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:19:06 crc kubenswrapper[4791]: E1208 21:19:06.010182 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-08 21:19:10.010161811 +0000 UTC m=+26.708920216 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 08 21:19:06 crc kubenswrapper[4791]: E1208 21:19:06.010103 4791 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:19:06 crc kubenswrapper[4791]: E1208 21:19:06.010207 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-08 21:19:10.010196941 +0000 UTC m=+26.708955386 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 08 21:19:06 crc kubenswrapper[4791]: E1208 21:19:06.010227 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-08 21:19:10.010219222 +0000 UTC m=+26.708977637 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:19:06 crc kubenswrapper[4791]: E1208 21:19:06.010243 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-08 21:19:10.010234852 +0000 UTC m=+26.708993297 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.028174 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a8f87b8159488138c04574a665b2ae3f16031f6fc7791117aace5c6d4111b2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.074812 4791 status_manager.go:875] "Failed to update status 
for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc451ac-cac3-4571-9ef2-8b22b3e20c32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a560d56b37dcaf5f26a1c58f78c02dd0f5fd9b222e696c15474acd25eba3ab2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c57e43dc171b23fa1abb6f3baa41f8dfee855b0e85afc8a233c7d9ead4706301\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://870672685b3bbcda0af97fd9af6d3f40fb9c9d57432f0736e64ea96a21ff1b34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"
containerID\\\":\\\"cri-o://468f39d9ddd6d74f616a2559f2204a65336bbb713c83c64ff511fd4126d30bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac2c3ef5a41d743dba41d84bbe4e89b64d8702b2d4565982be17fae194312abf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d57b4b60d9c35eb18a5976c42d5f38e0c8dfdb9c04f4e27585f00e57c3f27f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d57b4b60d9c35eb18a5976c42d5f38e0c8dfdb9c04f4e27585f00e57c3f27f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ee716248fcf96836fa6fca32e47c04270b384be440d725e5083bf72fd897122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ee716248fcf96836fa6fca32e47c04270b384be440d725e5083bf72fd897122\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a
8047c3513de0746992a77b9867a3a8709bfe81592ef9ca2483a753a0862fefe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8047c3513de0746992a77b9867a3a8709bfe81592ef9ca2483a753a0862fefe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.108900 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d7d5b0-bf26-4221-9933-1f2af688750f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4787a6a226550bb0d96b1b8af8059a6d349d659bad8718ef1df246a66b1e0ac2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b454a2e59b4533af556e4f5175b49175c7b4656861477cec02573e09f27b
28\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20ffa577a6292eb443cd5673c9d3a3833627cd38e63b153db5627047dc57b8f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad31c2445a3c19bedfc0d319262fdcc11fe2ba317881450f100cd4e67e2355f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153324bc4f35dd06b193de5ef89462ab658a50a1384b9eb6f5b3b5ccf0b2018\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"rea
dy\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.147103 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.187422 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.227054 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-np52c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a943687-2f86-4422-854f-ab38b351b8c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j87l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-np52c\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.268067 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-49gdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd95c042-30cb-438f-8e98-9aebe3ea93bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f25965cfe0a07212e4d159d38b276988beeef898ceacdffa62335cf03e0eb55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-94fd8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-49gdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.307533 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.349218 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.388623 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.429671 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a898dfe85305f9e755a0fce9f420825f50668f1491e9b6da752649c02746ad3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sx4hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c1cbce001d702f40825e1e2c6c77ce54111ab2097acbc1cc3d365fd3d7b06cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2
025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sx4hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgd9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.471070 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bpjxc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73955741-20a9-4a15-808b-c72dafba6dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f928dbb347322a7929cb5102328b4879bc66bb53d9eacc0dc05eda071a6b66f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f928dbb347322a7929cb5102328b4879bc66bb53d9eacc0dc05eda071a6b66f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1055c8dd1dd1126bb9e1ee83fdca69cdfa1052be3d1674d53dec324bb08bcc1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1055c8dd1dd1126bb9e1ee83fdca69cdfa1052be3d1674d53dec324bb08bcc1e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-
08T21:19:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bpjxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.513363 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\
\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"
},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\
\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7938544ef5c904b0a752cb91848ba48897df503bee8c19a431137328620c351e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7938544ef5c904b0a752cb91848ba48897df503bee8c19a431137328620c351e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dk8tz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.590914 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.592655 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.592694 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.592728 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.592837 4791 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.600384 4791 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.600733 4791 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.601679 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.601728 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.601741 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.601756 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.601767 4791 setters.go:603] "Node became 
not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:06Z","lastTransitionTime":"2025-12-08T21:19:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:06 crc kubenswrapper[4791]: E1208 21:19:06.619053 4791 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1d08c457-44d8-4000-aa7e-f79b560f907e\\\",\\\"systemUUID\\\":\\\"b32d8891-0397-496e-a082-5c392c97eb30\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.623568 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.623601 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.623611 4791 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.623625 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.623636 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:06Z","lastTransitionTime":"2025-12-08T21:19:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:06 crc kubenswrapper[4791]: E1208 21:19:06.639085 4791 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1d08c457-44d8-4000-aa7e-f79b560f907e\\\",\\\"systemUUID\\\":\\\"b32d8891-0397-496e-a082-5c392c97eb30\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.644070 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.644124 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.644137 4791 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.644158 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.644170 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:06Z","lastTransitionTime":"2025-12-08T21:19:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:06 crc kubenswrapper[4791]: E1208 21:19:06.659741 4791 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1d08c457-44d8-4000-aa7e-f79b560f907e\\\",\\\"systemUUID\\\":\\\"b32d8891-0397-496e-a082-5c392c97eb30\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.667290 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.667337 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.667347 4791 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.667366 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.667379 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:06Z","lastTransitionTime":"2025-12-08T21:19:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:06 crc kubenswrapper[4791]: E1208 21:19:06.677684 4791 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1d08c457-44d8-4000-aa7e-f79b560f907e\\\",\\\"systemUUID\\\":\\\"b32d8891-0397-496e-a082-5c392c97eb30\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.681125 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.681164 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.681174 4791 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.681187 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.681198 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:06Z","lastTransitionTime":"2025-12-08T21:19:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:06 crc kubenswrapper[4791]: E1208 21:19:06.689561 4791 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1d08c457-44d8-4000-aa7e-f79b560f907e\\\",\\\"systemUUID\\\":\\\"b32d8891-0397-496e-a082-5c392c97eb30\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:06 crc kubenswrapper[4791]: E1208 21:19:06.689771 4791 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.691405 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.691457 4791 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.691470 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.691492 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.691505 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:06Z","lastTransitionTime":"2025-12-08T21:19:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.793625 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.793658 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.793668 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.793684 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.793694 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:06Z","lastTransitionTime":"2025-12-08T21:19:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.817044 4791 generic.go:334] "Generic (PLEG): container finished" podID="73955741-20a9-4a15-808b-c72dafba6dce" containerID="c691b6fa2f57e004b1bb400e58bb3aa499612ce8bca27a2bf2dd2f45454e8c30" exitCode=0 Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.817094 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-bpjxc" event={"ID":"73955741-20a9-4a15-808b-c72dafba6dce","Type":"ContainerDied","Data":"c691b6fa2f57e004b1bb400e58bb3aa499612ce8bca27a2bf2dd2f45454e8c30"} Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.833491 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.849926 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.860394 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.870815 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a898dfe85305f9e755a0fce9f420825f50668f1491e9b6da752649c02746ad3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sx4hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c1cbce001d702f40825e1e2c6c77ce54111ab2097acbc1cc3d365fd3d7b06cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2
025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sx4hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgd9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.884651 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bpjxc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73955741-20a9-4a15-808b-c72dafba6dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f928dbb347322a7929cb5102328b4879bc66bb53d9eacc0dc05eda071a6b66f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f928dbb347322a7929cb5102328b4879bc66bb53d9eacc0dc05eda071a6b66f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1055c8dd1dd1126bb9e1ee83fdca69cdfa1052be3d1674d53dec324bb08bcc1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1055c8dd1dd1126bb9e1ee83fdca69cdfa1052be3d1674d53dec324bb08bcc1e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c691b6fa2f57e004b1bb400e58bb3aa499612ce8bca27a2bf2dd2f45454e8c30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c691b6fa2f57e004b1bb400e58bb3aa499612ce8bca27a2bf2dd2f45454e8c30\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bpjxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.900830 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.900873 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.900883 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.900897 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.900906 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:06Z","lastTransitionTime":"2025-12-08T21:19:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.907371 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7938544ef5c904b0a752cb91848ba48897df503bee8c19a431
137328620c351e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7938544ef5c904b0a752cb91848ba48897df503bee8c19a431137328620c351e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dk8tz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.917958 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a515231e-0c05-47c4-8731-6fdc0792b2c0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c27269d9eb25f7d681a2c653a09d62264d8fc2cd8500eb2c2037d1ef512bb852\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1bfa0139619e85c94a473c25d6f7163c2bd700b172b828d7decfc3a14baf4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de25971
26bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52d9c79fb2719ba7607d76daa944d1b52fe2bf20792f2dc24df762372b7c25ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f95e8c46d330a46bccff4299a2cc5d753d2371a1889cddbcdc70f7da571915a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.927252 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a8f87b8159488138c04574a665b2ae3f16031f6fc7791117aace5c6d4111b2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.944058 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc451ac-cac3-4571-9ef2-8b22b3e20c32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a560d56b37dcaf5f26a1c58f78c02dd0f5fd9b222e696c15474acd25eba3ab2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c57e43dc171b23fa1abb6f3baa41f8dfee855b0e85afc8a233c7d9ead4706301\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://870672685b3bbcda0af97fd9af6d3f40fb9c9d57432f0736e64ea96a21ff1b34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://468f39d9ddd6d74f616a2559f2204a65336bbb7
13c83c64ff511fd4126d30bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac2c3ef5a41d743dba41d84bbe4e89b64d8702b2d4565982be17fae194312abf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d57b4b60d9c35eb18a5976c42d5f38e0c8dfdb9c04f4e27585f00e57c3f27f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d57b4b60d9c35eb18a5976c42d5f38e0c8dfdb9c04f4e27585f00e57c3f27f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ee716248fcf96836fa6fca32e47c04270b384be440d725e5083bf72fd897122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ee716248fcf96836fa6fca32e47c04270b384be440d725e5083bf72fd897122\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a8047c3513de0746992a77b9867a3a8709bfe81592ef9ca2483a753a0862fefe\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8047c3513de0746992a77b9867a3a8709bfe81592ef9ca2483a753a0862fefe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.953539 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d7d5b0-bf26-4221-9933-1f2af688750f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4787a6a226550bb0d96b1b8af8059a6d349d659bad8718ef1df246a66b1e0ac2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b454a2e59b4533af556e4f5175b49175c7b4656861477cec02573e09f27b28\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-
apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20ffa577a6292eb443cd5673c9d3a3833627cd38e63b153db5627047dc57b8f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad31c2445a3c19bedfc0d319262fdcc11fe2ba317881450f100cd4e67e2355f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153324bc4f35dd06b193de5ef89462ab658a50a1384b9eb6f5b3b5ccf0b2018\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\
\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:06 crc kubenswrapper[4791]: I1208 21:19:06.989190 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.002909 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.002940 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.002949 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.002962 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.002971 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:07Z","lastTransitionTime":"2025-12-08T21:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.027586 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.067382 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-np52c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a943687-2f86-4422-854f-ab38b351b8c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j87l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-np52c\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.105913 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.105954 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.105963 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.105978 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.105988 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:07Z","lastTransitionTime":"2025-12-08T21:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.111364 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-49gdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd95c042-30cb-438f-8e98-9aebe3ea93bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f25965cfe0a07212e4d159d38b276988beeef898ceacdffa62335cf03e0eb55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},
{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-94fd8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-49gdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.208141 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.208178 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.208189 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.208203 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.208213 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:07Z","lastTransitionTime":"2025-12-08T21:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.310479 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.310547 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.310563 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.310579 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.310591 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:07Z","lastTransitionTime":"2025-12-08T21:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.412556 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.412598 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.412609 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.412622 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.412633 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:07Z","lastTransitionTime":"2025-12-08T21:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.514821 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.514852 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.514862 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.514877 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.514886 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:07Z","lastTransitionTime":"2025-12-08T21:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.597175 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.597229 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.597275 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:07 crc kubenswrapper[4791]: E1208 21:19:07.597313 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:19:07 crc kubenswrapper[4791]: E1208 21:19:07.597410 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:19:07 crc kubenswrapper[4791]: E1208 21:19:07.597499 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.616663 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.616894 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.616987 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.617055 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.617109 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:07Z","lastTransitionTime":"2025-12-08T21:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.719234 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.719504 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.719591 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.719731 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.719816 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:07Z","lastTransitionTime":"2025-12-08T21:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.821184 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.821222 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.821231 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.821244 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.821257 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:07Z","lastTransitionTime":"2025-12-08T21:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.824007 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" event={"ID":"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5","Type":"ContainerStarted","Data":"3aa9dedf617d8d5c7f71b58eb379feceb649024dfd1ba3711088afbb5b873936"} Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.825718 4791 generic.go:334] "Generic (PLEG): container finished" podID="73955741-20a9-4a15-808b-c72dafba6dce" containerID="6f4802030acccd617cb94a73400f6fff4fc8276f93decd2ddb9c820299afb4ea" exitCode=0 Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.825729 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-bpjxc" event={"ID":"73955741-20a9-4a15-808b-c72dafba6dce","Type":"ContainerDied","Data":"6f4802030acccd617cb94a73400f6fff4fc8276f93decd2ddb9c820299afb4ea"} Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.834596 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-np52c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a943687-2f86-4422-854f-ab38b351b8c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j87l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-np52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 
127.0.0.1:9743: connect: connection refused" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.844267 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-8mkcc"] Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.846270 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-8mkcc" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.849655 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.850104 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.850284 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.851384 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.896591 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-49gdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd95c042-30cb-438f-8e98-9aebe3ea93bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f25965cfe0a07212e4d159d38b276988beeef898ceacdffa62335cf03e0eb55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-94fd8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-49gdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.914768 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc451ac-cac3-4571-9ef2-8b22b3e20c32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a560d56b37dcaf5f26a1c58f78c02dd0f5fd9b222e696c15474acd25eba3ab2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c57e43dc171b23fa1abb6f3baa41f
8dfee855b0e85afc8a233c7d9ead4706301\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://870672685b3bbcda0af97fd9af6d3f40fb9c9d57432f0736e64ea96a21ff1b34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://468f39d9ddd6d74f616a2559f2204a65336bbb713c83c64ff511fd4126d30bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac2c3ef5a41d743dba41d84bbe4e89b64d8702b2d4565982be17fae194312abf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d57b4b60d9c35eb18a5976c42d5f38e0c8dfdb9c04f4e27585f00e57c3f27f4\\\",\\\"image\\\":\\\"quay.
io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d57b4b60d9c35eb18a5976c42d5f38e0c8dfdb9c04f4e27585f00e57c3f27f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ee716248fcf96836fa6fca32e47c04270b384be440d725e5083bf72fd897122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ee716248fcf96836fa6fca32e47c04270b384be440d725e5083bf72fd897122\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a8047c3513de0746992a77b9867a3a8709bfe81592ef9ca2483a753a0862fefe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8047c3513de0746992a77b9867a3a8709bfe81592ef9ca2483a753a0862fefe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.926337 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.926384 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.926392 4791 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.926409 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.926456 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:07Z","lastTransitionTime":"2025-12-08T21:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.926967 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/006ee595-d920-4e96-9c93-9283cf84a4de-serviceca\") pod \"node-ca-8mkcc\" (UID: \"006ee595-d920-4e96-9c93-9283cf84a4de\") " pod="openshift-image-registry/node-ca-8mkcc" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.926949 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d7d5b0-bf26-4221-9933-1f2af688750f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4787a6a226550bb0d96b1b8af8059a6d349d659bad8718ef1df246a66b1e0ac2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b454a2e59b4533af556e4f5175b49175c7b4656861477cec02573e09f27b28\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-control
ler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20ffa577a6292eb443cd5673c9d3a3833627cd38e63b153db5627047dc57b8f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad31c2445a3c19bedfc0d319262fdcc11fe2ba317881450f100cd4e67e2355f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153324bc4f35dd06b193de5ef89462ab658a50a1384b9eb6f5b3b5ccf0b2018\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/k
ube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.927027 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q66px\" (UniqueName: \"kubernetes.io/projected/006ee595-d920-4e96-9c93-9283cf84a4de-kube-api-access-q66px\") pod \"node-ca-8mkcc\" (UID: \"006ee595-d920-4e96-9c93-9283cf84a4de\") " pod="openshift-image-registry/node-ca-8mkcc" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.927054 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/006ee595-d920-4e96-9c93-9283cf84a4de-host\") pod \"node-ca-8mkcc\" (UID: \"006ee595-d920-4e96-9c93-9283cf84a4de\") " pod="openshift-image-registry/node-ca-8mkcc" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.936047 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.945167 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.957699 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bpjxc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73955741-20a9-4a15-808b-c72dafba6dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f928dbb347322a7929cb5102328b4879bc66bb53d9eacc0dc05eda071a6b66f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f928dbb347322a7929cb5102328b4879bc66bb53d9eacc0dc05eda071a6b66f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1055c8dd1dd1126bb9e1ee83fdca69cdfa1052be3d1674d53dec324bb08bcc1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1055c8dd1dd1126bb9e1ee83fdca69cdfa1052be3d1674d53dec324bb08bcc1e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c691b6fa2f57e004b1bb400e58bb3aa499612ce8bca27a2bf2dd2f45454e8c30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c691b6fa2f57e004b1bb400e58bb3aa499612ce8bca27a2bf2dd2f45454e8c30\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f4802030acccd617cb94a73400f6fff4fc8276f93decd2ddb9c820299afb4ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f4802030acccd617cb94a73400f6fff4fc8276f93decd2ddb9c820299afb4ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bpjxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.978915 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7938544ef5c904b0a752cb91848ba48897df503bee8c19a431137328620c351e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7938544ef5c904b0a752cb91848ba48897df503bee8c19a431137328620c351e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dk8tz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:07 crc kubenswrapper[4791]: I1208 21:19:07.989519 
4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.000439 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.011034 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.021364 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a898dfe85305f9e755a0fce9f420825f50668f1491e9b6da752649c02746ad3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/servic
eaccount\\\",\\\"name\\\":\\\"kube-api-access-sx4hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c1cbce001d702f40825e1e2c6c77ce54111ab2097acbc1cc3d365fd3d7b06cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sx4hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgd9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.028765 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q66px\" (UniqueName: \"kubernetes.io/projected/006ee595-d920-4e96-9c93-9283cf84a4de-kube-api-access-q66px\") pod \"node-ca-8mkcc\" (UID: \"006ee595-d920-4e96-9c93-9283cf84a4de\") " pod="openshift-image-registry/node-ca-8mkcc" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.028820 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/006ee595-d920-4e96-9c93-9283cf84a4de-host\") pod \"node-ca-8mkcc\" (UID: \"006ee595-d920-4e96-9c93-9283cf84a4de\") " pod="openshift-image-registry/node-ca-8mkcc" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.028861 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/006ee595-d920-4e96-9c93-9283cf84a4de-serviceca\") pod \"node-ca-8mkcc\" (UID: \"006ee595-d920-4e96-9c93-9283cf84a4de\") " pod="openshift-image-registry/node-ca-8mkcc" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.029047 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/006ee595-d920-4e96-9c93-9283cf84a4de-host\") pod \"node-ca-8mkcc\" (UID: \"006ee595-d920-4e96-9c93-9283cf84a4de\") " pod="openshift-image-registry/node-ca-8mkcc" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.030286 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/006ee595-d920-4e96-9c93-9283cf84a4de-serviceca\") pod \"node-ca-8mkcc\" (UID: \"006ee595-d920-4e96-9c93-9283cf84a4de\") " pod="openshift-image-registry/node-ca-8mkcc" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.030420 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:08 crc 
kubenswrapper[4791]: I1208 21:19:08.030470 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.030483 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.030500 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.030510 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:08Z","lastTransitionTime":"2025-12-08T21:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.033103 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a515231e-0c05-47c4-8731-6fdc0792b2c0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c27269d9eb25f7d681a2c653a09d62264d8fc2cd8500eb2c2037d1ef512bb852\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1bfa0139619e85c94a473c25d6f7163c2bd700b172b828d7decfc3a14baf4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52d9c79fb2719ba7607d76daa944d1b52fe2bf20792f2dc24df762372b7c25ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f95e8c46d330a46bccff4299a2cc5d753d2371a1889cddbcdc70f7da571915a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.051866 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q66px\" (UniqueName: \"kubernetes.io/projected/006ee595-d920-4e96-9c93-9283cf84a4de-kube-api-access-q66px\") pod \"node-ca-8mkcc\" (UID: \"006ee595-d920-4e96-9c93-9283cf84a4de\") " pod="openshift-image-registry/node-ca-8mkcc" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.053731 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a8f87b8159488138c04574a665b2ae3f16031f6fc7791117aace5c6d4111b2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.071066 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d7d5b0-bf26-4221-9933-1f2af688750f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4787a6a226550bb0d96b1b8af8059a6d349d659bad8718ef1df246a66b1e0ac2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b454a2e59b4533af556e4f5175b49175c7b4656861477cec02573e09f27b28\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20ffa577a6292eb443cd5673c9d3a3833627cd38e63b153db5627047dc57b8f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad31c2445a3c19bedfc0d319262fdcc11fe2ba317881450f100cd4e67e2355f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153324bc4f35dd06b193de5ef89462ab658a50a1384b9eb6f5b3b5ccf0b2018\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.084803 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.095632 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.105023 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-np52c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a943687-2f86-4422-854f-ab38b351b8c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j87l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-np52c\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.117816 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-49gdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd95c042-30cb-438f-8e98-9aebe3ea93bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f25965cfe0a07212e4d159d38b276988beeef898ceacdffa62335cf03e0eb55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-94fd8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-49gdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.132809 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.132850 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.132859 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.132874 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.132884 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:08Z","lastTransitionTime":"2025-12-08T21:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.135085 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc451ac-cac3-4571-9ef2-8b22b3e20c32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a560d56b37dcaf5f26a1c58f78c02dd0f5fd9b222e696c15474acd25eba3ab2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c57e43dc171b23fa1abb6f3baa41f8dfee855b0e85afc8a233c7d9ead4706301\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://870672685b3bbcda0af97fd9af6d3f40fb9c9d57432f0736e64ea96a21ff1b34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://468f39d9ddd6d74f616a2559f2204a65336bbb7
13c83c64ff511fd4126d30bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac2c3ef5a41d743dba41d84bbe4e89b64d8702b2d4565982be17fae194312abf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d57b4b60d9c35eb18a5976c42d5f38e0c8dfdb9c04f4e27585f00e57c3f27f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d57b4b60d9c35eb18a5976c42d5f38e0c8dfdb9c04f4e27585f00e57c3f27f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ee716248fcf96836fa6fca32e47c04270b384be440d725e5083bf72fd897122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ee716248fcf96836fa6fca32e47c04270b384be440d725e5083bf72fd897122\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a8047c3513de0746992a77b9867a3a8709bfe81592ef9ca2483a753a0862fefe\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8047c3513de0746992a77b9867a3a8709bfe81592ef9ca2483a753a0862fefe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.147857 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.158385 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.169365 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a898dfe85305f9e755a0fce9f420825f50668f1491e9b6da752649c02746ad3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sx4hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c1cbce001d702f40825e1e2c6c77ce54111ab2097ac
bc1cc3d365fd3d7b06cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sx4hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgd9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.183006 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bpjxc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73955741-20a9-4a15-808b-c72dafba6dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f928dbb347322a7929cb5102328b4879bc66bb53d9eacc0dc05eda071a6b66f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f928dbb347322a7929cb5102328b4879bc66bb53d9eacc0dc05eda071a6b66f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1055c8dd1dd1126bb9e1ee83fdca69cdfa1052be3d1674d53dec324bb08bcc1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1055c8dd1dd1126bb9e1ee83fdca69cdfa1052be3d1674d53dec324bb08bcc1e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c691b6fa2f57e004b1bb400e58bb3aa499612ce8bca27a2bf2dd2f45454e8c30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c691b6fa2f57e004b1bb400e58bb3aa499612ce8bca27a2bf2dd2f45454e8c30\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f4802030acccd617cb94a73400f6fff4fc8276f93decd2ddb9c820299afb4ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f4802030acccd617cb94a73400f6fff4fc8276f93decd2ddb9c820299afb4ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bpjxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.208080 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-8mkcc" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.220236 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7938544ef5c904b0a752cb91848ba48897df503bee8c19a431137328620c351e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7938544ef5c904b0a752cb91848ba48897df503bee8c19a431137328620c351e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dk8tz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.235203 
4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.235256 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.235267 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.235287 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.235301 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:08Z","lastTransitionTime":"2025-12-08T21:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.244485 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8mkcc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"006ee595-d920-4e96-9c93-9283cf84a4de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:07Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:07Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q66px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:07Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8mkcc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.289941 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.330516 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a515231e-0c05-47c4-8731-6fdc0792b2c0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c27269d9eb25f7d681a2c653a09d62264d8fc2cd8500eb2c2037d1ef512bb852\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1bfa0139619e85c94a473c25d6f7163c2bd700b172b828d7decfc3a14baf4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52d9c79fb2719ba7607d76daa944d1b52fe2bf20792f2dc24df762372b7c25ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f95e8c46d330a46bccff4299a2cc5d753d2371a1889cddbcdc70f7da571915a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.338067 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.338123 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.338136 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.338151 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.338162 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:08Z","lastTransitionTime":"2025-12-08T21:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.368970 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a8f87b8159488138c04574a665b2ae3f16031f6fc7791117aace5c6d4111b2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.440954 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.441310 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.441379 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.441510 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.441574 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:08Z","lastTransitionTime":"2025-12-08T21:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:08 crc kubenswrapper[4791]: W1208 21:19:08.467559 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod006ee595_d920_4e96_9c93_9283cf84a4de.slice/crio-62fb34c360958480bcf7c6eb43f9b4d32c9b9147e864b6a1115acec9e9a2d678 WatchSource:0}: Error finding container 62fb34c360958480bcf7c6eb43f9b4d32c9b9147e864b6a1115acec9e9a2d678: Status 404 returned error can't find the container with id 62fb34c360958480bcf7c6eb43f9b4d32c9b9147e864b6a1115acec9e9a2d678 Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.543791 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.543854 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.543865 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.543886 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.543900 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:08Z","lastTransitionTime":"2025-12-08T21:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.646126 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.646174 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.646186 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.646203 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.646215 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:08Z","lastTransitionTime":"2025-12-08T21:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.749443 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.749492 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.749507 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.749524 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.749536 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:08Z","lastTransitionTime":"2025-12-08T21:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.829367 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-8mkcc" event={"ID":"006ee595-d920-4e96-9c93-9283cf84a4de","Type":"ContainerStarted","Data":"62fb34c360958480bcf7c6eb43f9b4d32c9b9147e864b6a1115acec9e9a2d678"} Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.851869 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.851974 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.851985 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.851999 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.852009 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:08Z","lastTransitionTime":"2025-12-08T21:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.954684 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.954746 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.954758 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.954774 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:08 crc kubenswrapper[4791]: I1208 21:19:08.954785 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:08Z","lastTransitionTime":"2025-12-08T21:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.057860 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.058231 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.058241 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.058253 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.058264 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:09Z","lastTransitionTime":"2025-12-08T21:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.160249 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.160276 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.160284 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.160296 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.160304 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:09Z","lastTransitionTime":"2025-12-08T21:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.262828 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.262880 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.262891 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.262907 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.262919 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:09Z","lastTransitionTime":"2025-12-08T21:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.365444 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.365483 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.365491 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.365505 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.365515 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:09Z","lastTransitionTime":"2025-12-08T21:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.467686 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.467800 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.467829 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.467864 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.467894 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:09Z","lastTransitionTime":"2025-12-08T21:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.571228 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.571291 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.571305 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.571327 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.571343 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:09Z","lastTransitionTime":"2025-12-08T21:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.597688 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.597832 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:09 crc kubenswrapper[4791]: E1208 21:19:09.597951 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.597998 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:09 crc kubenswrapper[4791]: E1208 21:19:09.598149 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:19:09 crc kubenswrapper[4791]: E1208 21:19:09.598267 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.674792 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.674864 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.674880 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.674902 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.674917 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:09Z","lastTransitionTime":"2025-12-08T21:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.778398 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.778464 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.778480 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.778508 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.778528 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:09Z","lastTransitionTime":"2025-12-08T21:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.840259 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-bpjxc" event={"ID":"73955741-20a9-4a15-808b-c72dafba6dce","Type":"ContainerStarted","Data":"2098190deaa3e3141de083af3f6204543b734baa5328cf569b484d9b093e17d1"} Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.881506 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.881561 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.881572 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.881587 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.881596 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:09Z","lastTransitionTime":"2025-12-08T21:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.954044 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:09 crc kubenswrapper[4791]: E1208 21:19:09.954305 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:17.95428792 +0000 UTC m=+34.653046275 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.983887 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.983964 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.983978 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.983999 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:09 crc kubenswrapper[4791]: I1208 21:19:09.984013 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:09Z","lastTransitionTime":"2025-12-08T21:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.054677 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.054781 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.054811 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.054829 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:10 crc kubenswrapper[4791]: E1208 21:19:10.054893 4791 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object 
"openshift-network-console"/"networking-console-plugin" not registered Dec 08 21:19:10 crc kubenswrapper[4791]: E1208 21:19:10.054937 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-08 21:19:18.054924643 +0000 UTC m=+34.753682988 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 08 21:19:10 crc kubenswrapper[4791]: E1208 21:19:10.055131 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 08 21:19:10 crc kubenswrapper[4791]: E1208 21:19:10.055203 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 08 21:19:10 crc kubenswrapper[4791]: E1208 21:19:10.055231 4791 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:19:10 crc kubenswrapper[4791]: E1208 21:19:10.055233 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 08 21:19:10 crc kubenswrapper[4791]: E1208 21:19:10.055284 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 08 21:19:10 crc kubenswrapper[4791]: E1208 21:19:10.055295 4791 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:19:10 crc kubenswrapper[4791]: E1208 21:19:10.055357 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-08 21:19:18.055341323 +0000 UTC m=+34.754099668 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:19:10 crc kubenswrapper[4791]: E1208 21:19:10.055363 4791 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 08 21:19:10 crc kubenswrapper[4791]: E1208 21:19:10.055376 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-08 21:19:18.055368934 +0000 UTC m=+34.754127279 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:19:10 crc kubenswrapper[4791]: E1208 21:19:10.055558 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-08 21:19:18.055507987 +0000 UTC m=+34.754266372 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.094800 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.094862 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.094879 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.094903 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.094920 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:10Z","lastTransitionTime":"2025-12-08T21:19:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.198167 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.198242 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.198263 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.198383 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.198411 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:10Z","lastTransitionTime":"2025-12-08T21:19:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.300662 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.300749 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.300764 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.300780 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.300791 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:10Z","lastTransitionTime":"2025-12-08T21:19:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.404011 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.404038 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.404046 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.404060 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.404069 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:10Z","lastTransitionTime":"2025-12-08T21:19:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.507103 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.507143 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.507154 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.507169 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.507180 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:10Z","lastTransitionTime":"2025-12-08T21:19:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.609067 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.609108 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.609118 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.609135 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.609145 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:10Z","lastTransitionTime":"2025-12-08T21:19:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.711530 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.711559 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.711568 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.711581 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.711592 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:10Z","lastTransitionTime":"2025-12-08T21:19:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.815761 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.815805 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.815823 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.815848 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.815869 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:10Z","lastTransitionTime":"2025-12-08T21:19:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.847947 4791 generic.go:334] "Generic (PLEG): container finished" podID="73955741-20a9-4a15-808b-c72dafba6dce" containerID="2098190deaa3e3141de083af3f6204543b734baa5328cf569b484d9b093e17d1" exitCode=0 Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.847994 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-bpjxc" event={"ID":"73955741-20a9-4a15-808b-c72dafba6dce","Type":"ContainerDied","Data":"2098190deaa3e3141de083af3f6204543b734baa5328cf569b484d9b093e17d1"} Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.850329 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-8mkcc" event={"ID":"006ee595-d920-4e96-9c93-9283cf84a4de","Type":"ContainerStarted","Data":"16239b36ef8239352cd9c9e2516f19638c8b567c35634ce043a4e0e41d29d1c1"} Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.854995 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" event={"ID":"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5","Type":"ContainerStarted","Data":"edf0bc928bbf2c2486d9351ffca88f382b57055225c9ddb099d15cc38fc12462"} Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.855385 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.855554 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.863929 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-49gdc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd95c042-30cb-438f-8e98-9aebe3ea93bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f25965cfe0a07212e4d159d38b276988beeef898ceacdffa62335cf03e0eb55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-94fd8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-49gdc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.884234 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc451ac-cac3-4571-9ef2-8b22b3e20c32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a560d56b37dcaf5f26a1c58f78c02dd0f5fd9b222e696c15474acd25eba3ab2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c57e43dc171b23fa1abb6f3baa41f8dfee855b0e85afc8a233c7d9ead4706301\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://870672685b3bbcda0af97fd9af6d3f40fb9c9d57432f0736e64ea96a21ff1b34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"sta
rted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://468f39d9ddd6d74f616a2559f2204a65336bbb713c83c64ff511fd4126d30bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac2c3ef5a41d743dba41d84bbe4e89b64d8702b2d4565982be17fae194312abf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d57b4b60d9c35eb18a5976c42d5f38e0c8dfdb9c04f4e27585f00e57c3f27f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d57b4b60d9c35eb18a5976c42d5f38e0c8dfdb9c04f4e27585f00e57c3f27f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ee716248fcf96836fa6fca32e47c04270b384be440d725e5083bf72fd897122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\"
:{\\\"containerID\\\":\\\"cri-o://9ee716248fcf96836fa6fca32e47c04270b384be440d725e5083bf72fd897122\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a8047c3513de0746992a77b9867a3a8709bfe81592ef9ca2483a753a0862fefe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8047c3513de0746992a77b9867a3a8709bfe81592ef9ca2483a753a0862fefe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.885027 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.886897 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.901391 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d7d5b0-bf26-4221-9933-1f2af688750f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4787a6a226550bb0d96b1b8af8059a6d349d659bad8718ef1df246a66b1e0ac2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b454a2e59b4533af556e4f5175b49175c7b4656861477cec02573e09f27b28\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20ffa577a6292eb443cd5673c9d3a3833627cd38e63b153db5627047dc57b8f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad31c2445a3c19bedfc0d319262fdcc11fe2ba317881450f100cd4e67e2355f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153324bc4f35dd06b193de5ef89462ab658a50a1384b9eb6f5b3b5ccf0b2018\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.915199 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.919824 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.919892 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.919926 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.919951 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.919967 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:10Z","lastTransitionTime":"2025-12-08T21:19:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.936192 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.948130 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-np52c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a943687-2f86-4422-854f-ab38b351b8c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j87l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-np52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.963187 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7938544ef5c904b0a752cb91848ba48897df503bee8c19a431137328620c351e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7938544ef5c904b0a752cb91848ba48897df503bee8c19a431137328620c351e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dk8tz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.972233 
4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8mkcc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"006ee595-d920-4e96-9c93-9283cf84a4de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:07Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:07Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q66px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:07Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8mkcc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.982761 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:10 crc kubenswrapper[4791]: I1208 21:19:10.992595 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.003976 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.015090 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a898dfe85305f9e755a0fce9f420825f50668f1491e9b6da752649c02746ad3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sx4hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c1cbce001d702f40825e1e2c6c77ce54111ab2097ac
bc1cc3d365fd3d7b06cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sx4hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgd9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.022350 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.022388 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.022398 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.022417 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.022429 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:11Z","lastTransitionTime":"2025-12-08T21:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.029179 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bpjxc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73955741-20a9-4a15-808b-c72dafba6dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f928dbb347322a7929cb5102328b4879bc66bb53d9eacc0dc05eda071a6b66f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f928dbb347322a7929cb5102328b4879bc66bb53d9eacc0dc05eda071a6b66f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7
z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1055c8dd1dd1126bb9e1ee83fdca69cdfa1052be3d1674d53dec324bb08bcc1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1055c8dd1dd1126bb9e1ee83fdca69cdfa1052be3d1674d53dec324bb08bcc1e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c691b6fa2f57e004b1bb400e58bb3aa499612ce8bca27a2bf2dd2f45454e8c30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c691b6fa2f57e004b1bb400e58bb3aa499612ce8bca27a2bf2dd2f45454e8c30\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f4802030acccd617cb94a73400f6fff4fc8276f93decd2ddb9c820299afb4ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f4802030acccd617cb94a73400f6fff4fc8276f93decd2ddb9c820299afb4ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:07Z\\\",\\\"reason\\\
":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2098190deaa3e3141de083af3f6204543b734baa5328cf569b484d9b093e17d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2098190deaa3e3141de083af3f6204543b734baa5328cf569b484d9b093e17d1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bpjxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.041958 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a515231e-0c05-47c4-8731-6fdc0792b2c0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c27269d9eb25f7d681a2c653a09d62264d8fc2cd8500eb2c2037d1ef512bb852\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1bfa0139619e85c94a473c25d6f7163c2bd700b172b828d7decfc3a14baf4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52d9c79fb2719ba7607d76daa944d1b52fe2bf20792f2dc24df762372b7c25ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f95e8c46d330a46bccff4299a2cc5d753d2371a1889cddbcdc70f7da571915a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.053629 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a8f87b8159488138c04574a665b2ae3f16031f6fc7791117aace5c6d4111b2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 
21:19:11.063179 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-np52c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a943687-2f86-4422-854f-ab38b351b8c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j87l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-np52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.074017 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-49gdc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd95c042-30cb-438f-8e98-9aebe3ea93bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f25965cfe0a07212e4d159d38b276988beeef898ceacdffa62335cf03e0eb55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-94fd8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-49gdc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.095698 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc451ac-cac3-4571-9ef2-8b22b3e20c32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a560d56b37dcaf5f26a1c58f78c02dd0f5fd9b222e696c15474acd25eba3ab2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c57e43dc171b23fa1abb6f3baa41f8dfee855b0e85afc8a233c7d9ead4706301\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://870672685b3bbcda0af97fd9af6d3f40fb9c9d57432f0736e64ea96a21ff1b34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"sta
rted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://468f39d9ddd6d74f616a2559f2204a65336bbb713c83c64ff511fd4126d30bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac2c3ef5a41d743dba41d84bbe4e89b64d8702b2d4565982be17fae194312abf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d57b4b60d9c35eb18a5976c42d5f38e0c8dfdb9c04f4e27585f00e57c3f27f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d57b4b60d9c35eb18a5976c42d5f38e0c8dfdb9c04f4e27585f00e57c3f27f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ee716248fcf96836fa6fca32e47c04270b384be440d725e5083bf72fd897122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\"
:{\\\"containerID\\\":\\\"cri-o://9ee716248fcf96836fa6fca32e47c04270b384be440d725e5083bf72fd897122\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a8047c3513de0746992a77b9867a3a8709bfe81592ef9ca2483a753a0862fefe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8047c3513de0746992a77b9867a3a8709bfe81592ef9ca2483a753a0862fefe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.107671 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d7d5b0-bf26-4221-9933-1f2af688750f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4787a6a226550bb0d96b1b8af8059a6d349d659bad8718ef1df246a66b1e0ac2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b454a2e59b4533af556e4f5175b49175c7b4656861477cec02573e09f27b28\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20ffa577a6292eb443cd5673c9d3a3833627cd38e63b153db5627047dc57b8f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad31c2445a3c19bedfc0d319262fdcc11fe2ba317881450f100cd4e67e2355f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153324bc4f35dd06b193de5ef89462ab658a50a1384b9eb6f5b3b5ccf0b2018\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.117666 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.126887 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.127001 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.127023 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.127071 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.127085 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:11Z","lastTransitionTime":"2025-12-08T21:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.129851 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.144996 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bpjxc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73955741-20a9-4a15-808b-c72dafba6dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f928dbb347322a7929cb5102328b4879bc66bb53d9eacc0dc05eda071a6b66f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f928dbb347322a7929cb5102328b4879bc66bb53d9eacc0dc05eda071a6b66f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1055c8dd1dd1126bb9e1ee83fdca69cdfa1052be3d1674d53dec324bb08bcc1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1055c8dd1dd1126bb9e1ee83fdca69cdfa1052be3d1674d53dec324bb08bcc1e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\
\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c691b6fa2f57e004b1bb400e58bb3aa499612ce8bca27a2bf2dd2f45454e8c30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c691b6fa2f57e004b1bb400e58bb3aa499612ce8bca27a2bf2dd2f45454e8c30\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f4802030acccd617cb94a73400f6fff4fc8276f93decd2ddb9c820299afb4ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f4802030acccd617cb94a73400f6fff4fc8276f93decd2ddb9c820299afb4ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2098190deaa3e3141de083af3f6204543b734baa5328cf569b484d9b093e17d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2098190deaa3e3141de083af3f6204543b734baa5328cf569b484d9b093e17d1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bpjxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.164538 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92ac0f9db30045f8ac6e2997ecb935634f31f5ee0cbb2da74e87c344df3ed125\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22cca113e278abf9667c373147d1bf61b951ae07e7e1046e13ebf28aa0355007\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fe5fcb37182b9302ac277ae161589f867ee9126114052e88a82048e319595c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b83e290f40edffd1d7679ef280a32bff978b35e9b0fcd1f179d3707021ba3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37c2a697912d045e363b17699528d1f17acd50d3a0883e40b9023d72f3cf5e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ea79a956f5bc31b744c699f976d260048fa6d0c69e5d96cc3e832c697a796ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edf0bc928bbf2c2486d9351ffca88f382b570552
25c9ddb099d15cc38fc12462\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aa9dedf617d8d5c7f71b58eb379feceb649024dfd1ba3711088afbb5b873936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7938544ef5c904b0a752cb91848ba48897df503bee8c19a431137328620c351e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7938544ef5c904b0a752cb91848ba48897df503bee8c19a431137328620c351e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dk8tz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.174138 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8mkcc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"006ee595-d920-4e96-9c93-9283cf84a4de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16239b36ef8239352cd9c9e2516f19638c8b567c35634ce043a4e0e41d29d1c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q66px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:07Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8mkcc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.184454 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.193248 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.201597 4791 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.203299 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.212450 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a898dfe85305f9e755a0fce9f420825f50668f1491e9b6da752649c02746ad3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sx4hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c1cbce001d702f40825e1e2c6c77ce54111ab2097ac
bc1cc3d365fd3d7b06cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sx4hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgd9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.221029 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a515231e-0c05-47c4-8731-6fdc0792b2c0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c27269d9eb25f7d681a2c653a09d62264d8fc2cd8500eb2c2037d1ef512bb852\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1bfa0139619e85c94a473c25d6f7163c2bd700b172b828d7decfc3a14baf4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-releas
e-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52d9c79fb2719ba7607d76daa944d1b52fe2bf20792f2dc24df762372b7c25ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f95e8c46d330a46bccff4299a2cc5d753d2371a1889cddbcdc70f7da571915a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.229439 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a8f87b8159488138c04574a665b2ae3f16031f6fc7791117aace5c6d4111b2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.233124 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.233151 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.233162 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.233175 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.233184 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:11Z","lastTransitionTime":"2025-12-08T21:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.335526 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.335565 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.335575 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.335589 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.335598 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:11Z","lastTransitionTime":"2025-12-08T21:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.438876 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.438960 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.438979 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.439012 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.439032 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:11Z","lastTransitionTime":"2025-12-08T21:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.541695 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.541760 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.541775 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.541793 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.541808 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:11Z","lastTransitionTime":"2025-12-08T21:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.597021 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:11 crc kubenswrapper[4791]: E1208 21:19:11.597632 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.597050 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.597068 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:11 crc kubenswrapper[4791]: E1208 21:19:11.597991 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:19:11 crc kubenswrapper[4791]: E1208 21:19:11.598066 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.644950 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.644987 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.644995 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.645009 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.645018 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:11Z","lastTransitionTime":"2025-12-08T21:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.747049 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.747081 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.747092 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.747109 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.747122 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:11Z","lastTransitionTime":"2025-12-08T21:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.850440 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.850495 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.850508 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.850525 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.850538 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:11Z","lastTransitionTime":"2025-12-08T21:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.861977 4791 generic.go:334] "Generic (PLEG): container finished" podID="73955741-20a9-4a15-808b-c72dafba6dce" containerID="556df10f44cb01b0ecb1266965fed9e1ffaed75bbc38e87f8ae7e88de5be1184" exitCode=0 Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.862019 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-bpjxc" event={"ID":"73955741-20a9-4a15-808b-c72dafba6dce","Type":"ContainerDied","Data":"556df10f44cb01b0ecb1266965fed9e1ffaed75bbc38e87f8ae7e88de5be1184"} Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.862164 4791 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.876251 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d7d5b0-bf26-4221-9933-1f2af688750f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4787a6a226550bb0d96b1b8af8059a6d349d659bad8718ef1df246a66b1e0ac2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b454a2e59b4533af556e4f5175b49175c7b4656861477cec02573e09f27b28\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20ffa577a6292eb443cd5
673c9d3a3833627cd38e63b153db5627047dc57b8f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad31c2445a3c19bedfc0d319262fdcc11fe2ba317881450f100cd4e67e2355f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153324bc4f35dd06b193de5ef89462ab658a50a1384b9eb6f5b3b5ccf0b2018\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.888497 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.900815 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.911937 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-np52c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a943687-2f86-4422-854f-ab38b351b8c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct 
envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j87l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-np52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.922984 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-49gdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd95c042-30cb-438f-8e98-9aebe3ea93bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f25965cfe0a07212e4d159d38b276988beeef898ceacdffa62335cf03e0eb55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\
\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-94fd8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-49gdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.940690 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc451ac-cac3-4571-9ef2-8b22b3e20c32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a560d56b37dcaf5f26a1c58f78c02dd0f5fd9b222e696c15474acd25eba3ab2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c57e43dc171b23fa1abb6f3baa41f8dfee855b0e85afc8a233c7d9ead4706301\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e
9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://870672685b3bbcda0af97fd9af6d3f40fb9c9d57432f0736e64ea96a21ff1b34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://468f39d9ddd6d74f616a2559f2204a65336bbb713c83c64ff511fd4126d30bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac2c3ef5a41d743dba41d84bbe4e89b64d8702b2d4565982be17fae194312abf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d57b4b60d9c35eb18a5976c42d5f38e0c8dfdb9c04f4e27585f00e57c3f27f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\
\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d57b4b60d9c35eb18a5976c42d5f38e0c8dfdb9c04f4e27585f00e57c3f27f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ee716248fcf96836fa6fca32e47c04270b384be440d725e5083bf72fd897122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ee716248fcf96836fa6fca32e47c04270b384be440d725e5083bf72fd897122\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a8047c3513de0746992a77b9867a3a8709bfe81592ef9ca2483a753a0862fefe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8047c3513de0746992a77b9867a3a8709bfe81592ef9ca2483a753a0862fefe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.952774 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.952976 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.953011 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.953025 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.953046 4791 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeNotReady" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.953061 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:11Z","lastTransitionTime":"2025-12-08T21:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.963373 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.973270 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a898dfe85305f9e755a0fce9f420825f50668f1491e9b6da752649c02746ad3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sx4hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c1cbce001d702f40825e1e2c6c77ce54111ab2097acbc1cc3d365fd3d7b06cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2
025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sx4hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgd9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:11 crc kubenswrapper[4791]: I1208 21:19:11.986901 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bpjxc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73955741-20a9-4a15-808b-c72dafba6dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f928dbb347322a7929cb5102328b4879bc66bb53d9eacc0dc05eda071a6b66f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f928dbb347322a7929cb5102328b4879bc66bb53d9eacc0dc05eda071a6b66f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1055c8dd1dd1126bb9e1ee83fdca69cdfa1052be3d1674d53dec324bb08bcc1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1055c8dd1dd1126bb9e1ee83fdca69cdfa1052be3d1674d53dec324bb08bcc1e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c691b6fa2f57e004b1bb400e58bb3aa499612ce8bca27a2bf2dd2f45454e8c30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c691b6fa2f57e004b1bb400e58bb3aa499612ce8bca27a2bf2dd2f45454e8c30\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f4802030acccd617cb94a73400f6fff4fc8276f93decd2ddb9c820299afb4ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f4802030acccd617cb94a73400f6fff4fc8276f93decd2ddb9c820299afb4ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2098190deaa3e3141de083af3f6204543b734baa5328cf569b484d9b093e17d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2098190deaa3e3141de083af3f6204543b734baa5328cf569b484d9b093e17d1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://556df10f44cb01b0ecb1266965fed9e1ffaed75bbc38e87f8ae7e88de5be1184\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://556df10f44cb01b0ecb1266965fed9e1ffaed75bbc38e87f8ae7e88de5be1184\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bpjxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.010287 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92ac0f9db30045f8ac6e2997ecb935634f31f5ee0cbb2da74e87c344df3ed125\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22cca113e278abf9667c373147d1bf61b951ae07e7e1046e13ebf28aa0355007\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fe5fcb37182b9302ac277ae161589f867ee9126114052e88a82048e319595c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b83e290f40edffd1d7679ef280a32bff978b35e9b0fcd1f179d3707021ba3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37c2a697912d045e363b17699528d1f17acd50d3a0883e40b9023d72f3cf5e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ea79a956f5bc31b744c699f976d260048fa6d0c69e5d96cc3e832c697a796ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edf0bc928bbf2c2486d9351ffca88f382b570552
25c9ddb099d15cc38fc12462\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aa9dedf617d8d5c7f71b58eb379feceb649024dfd1ba3711088afbb5b873936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7938544ef5c904b0a752cb91848ba48897df503bee8c19a431137328620c351e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7938544ef5c904b0a752cb91848ba48897df503bee8c19a431137328620c351e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dk8tz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.021506 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8mkcc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"006ee595-d920-4e96-9c93-9283cf84a4de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16239b36ef8239352cd9c9e2516f19638c8b567c35634ce043a4e0e41d29d1c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q66px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:07Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8mkcc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.031345 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.043743 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a515231e-0c05-47c4-8731-6fdc0792b2c0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c27269d9eb25f7d681a2c653a09d62264d8fc2cd8500eb2c2037d1ef512bb852\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1bfa0139619e85c94a473c25d6f7163c2bd700b172b828d7decfc3a14baf4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-de
v@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52d9c79fb2719ba7607d76daa944d1b52fe2bf20792f2dc24df762372b7c25ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f95e8c46d330a46bccff4299a2cc5d753d2371a1889cddbcdc70f7da571915a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.053620 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a8f87b8159488138c04574a665b2ae3f16031f6fc7791117aace5c6d4111b2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.055381 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.055404 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.055413 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.055430 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.055440 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:12Z","lastTransitionTime":"2025-12-08T21:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.162961 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.163001 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.163014 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.163033 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.163045 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:12Z","lastTransitionTime":"2025-12-08T21:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.265879 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.265931 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.265943 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.265965 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.265979 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:12Z","lastTransitionTime":"2025-12-08T21:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.368858 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.368895 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.368905 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.368921 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.368931 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:12Z","lastTransitionTime":"2025-12-08T21:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.471725 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.471789 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.471803 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.471823 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.471839 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:12Z","lastTransitionTime":"2025-12-08T21:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.574797 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.574834 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.574844 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.574860 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.574870 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:12Z","lastTransitionTime":"2025-12-08T21:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.677505 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.677558 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.677572 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.677590 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.677603 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:12Z","lastTransitionTime":"2025-12-08T21:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.780938 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.780980 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.780991 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.781007 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.781019 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:12Z","lastTransitionTime":"2025-12-08T21:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.876089 4791 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.876178 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-bpjxc" event={"ID":"73955741-20a9-4a15-808b-c72dafba6dce","Type":"ContainerStarted","Data":"b439c3680d0cbf84563f52f8a16a1a482361ca110de7dc537c64b494b994c27d"} Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.884293 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.884373 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.884401 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.884434 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.884460 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:12Z","lastTransitionTime":"2025-12-08T21:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.913589 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc451ac-cac3-4571-9ef2-8b22b3e20c32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a560d56b37dcaf5f26a1c58f78c02dd0f5fd9b222e696c15474acd25eba3ab2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c57e43dc171b23fa1abb6f3baa41f8dfee855b0e85afc8a233c7d9ead4706301\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://870672685b3bbcda0af97fd9af6d3f40fb9c9d57432f0736e64ea96a21ff1b34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://468f39d9ddd6d74f616a2559f2204a65336bbb713c83c64ff511fd4126d30bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac2c3ef5a41d743dba41d84bbe4e89b64d8702b2d4565982be17fae194312abf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d57b4b60d9c35eb18a5976c42d5f38e0c8dfdb9c04f4e27585f00e57c3f27f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d57b4b60d9c35eb18a5976c42d5f38e0c8dfdb9c04f4e27585f00e57c3f27f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ee716248fcf96836fa6fca32e47c04270b384be440d725e5083bf72fd897122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ee716248fcf96836fa6fca32e47c04270b384be440d725e5083bf72fd897122\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a8047c3513de0746992a77b9867a3a8709bfe81592ef9ca2483a753a0862fefe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8047c3513de0746992a77b9867a3a8709bfe81592ef9ca2483a753a0862fefe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.931057 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d7d5b0-bf26-4221-9933-1f2af688750f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4787a6a226550bb0d96b1b8af8059a6d349d659bad8718ef1df246a66b1e0ac2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b454a2e59b4533af556e4f5175b49175c7b4656861477cec02573e09f27b28\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20ffa577a6292eb443cd5673c9d3a3833627cd38e63b153db5627047dc57b8f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad31c2445a3c19bedfc0d319262fdcc11fe2ba317881450f100cd4e67e2355f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153324bc4f35dd06b193de5ef89462ab658a50a1384b9eb6f5b3b5ccf0b2018\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.943159 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.956941 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.969087 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-np52c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a943687-2f86-4422-854f-ab38b351b8c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j87l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-np52c\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.987809 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-49gdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd95c042-30cb-438f-8e98-9aebe3ea93bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f25965cfe0a07212e4d159d38b276988beeef898ceacdffa62335cf03e0eb55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-94fd8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-49gdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.989817 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.989877 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.989897 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.989923 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:12 crc kubenswrapper[4791]: I1208 21:19:12.989943 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:12Z","lastTransitionTime":"2025-12-08T21:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.004604 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.022739 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.041633 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.076590 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a898dfe85305f9e755a0fce9f420825f50668f1491e9b6da752649c02746ad3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sx4hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c1cbce001d702f40825e1e2c6c77ce54111ab2097acbc1cc3d365fd3d7b06cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2
025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sx4hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgd9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.093675 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.093758 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.093776 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.093800 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.093814 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:13Z","lastTransitionTime":"2025-12-08T21:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.105867 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bpjxc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73955741-20a9-4a15-808b-c72dafba6dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b439c3680d0cbf84563f52f8a16a1a482361ca110de7dc537c64b494b994c27d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f928dbb347322a7929cb5102328b4879bc66bb53d9eacc0dc05eda071a6b66f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f928dbb347322a7929cb5102328b4879bc66bb53d9eacc0dc05eda071a6b66f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1055c8dd1dd1126bb9e1ee83fdca69cdfa1052be3d1674d53dec324bb08bcc1e\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1055c8dd1dd1126bb9e1ee83fdca69cdfa1052be3d1674d53dec324bb08bcc1e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c691b6fa2f57e004b1bb400e58bb3aa499612ce8bca27a2bf2dd2f45454e8c30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c691b6fa2f57e004b1bb400e58bb3aa499612ce8bca27a2bf2dd2f45454e8c30\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f4802030acccd617cb94a73400f6fff4fc8276f93decd2ddb9c820299afb4ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f4802030acccd617cb94a73400f6fff4fc8276f93decd2ddb9c820299afb4ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2098190deaa3e3141de083af3f6204543b734baa5328cf569b484d9b093e17d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2098190deaa3e3141de083af3f6204543b734baa5328cf569b484d9b093e17d1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://556df10f44cb01b0ecb1266965fed9e1ffaed75bbc38e87f8ae7e88de5be1184\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://556df10f44cb01b0ecb1266965fed9e1ffaed75bbc38e87f8ae7e88de5be1184\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bpjxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.133129 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92ac0f9db30045f8ac6e2997ecb935634f31f5ee0cbb2da74e87c344df3ed125\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22cca113e278abf9667c373147d1bf61b951ae07e7e1046e13ebf28aa0355007\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fe5fcb37182b9302ac277ae161589f867ee9126114052e88a82048e319595c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b83e290f40edffd1d7679ef280a32bff978b35e9b0fcd1f179d3707021ba3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37c2a697912d045e363b17699528d1f17acd50d3a0883e40b9023d72f3cf5e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ea79a956f5bc31b744c699f976d260048fa6d0c69e5d96cc3e832c697a796ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edf0bc928bbf2c2486d9351ffca88f382b57055225c9ddb099d15cc38fc12462\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aa9dedf617d8d5c7f71b58eb379feceb649024dfd1ba3711088afbb5b873936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7938544ef5c904b0a752cb91848ba48897df503bee8c19a431137328620c351e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7938544ef5c904b0a752cb91848ba48897df503bee8c19a431137328620c351e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dk8tz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.150298 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8mkcc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"006ee595-d920-4e96-9c93-9283cf84a4de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16239b36ef8239352cd9c9e2516f19638c8b567c35634ce043a4e0e41d29d1c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q66px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:07Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8mkcc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.161287 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a515231e-0c05-47c4-8731-6fdc0792b2c0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c27269d9eb25f7d681a2c653a09d62264d8fc2cd8500eb2c2037d1ef512bb852\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1bfa0139619e85c94a473c25d6f7163c2bd700b172b828d7decfc3a14baf4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52d9c79fb2719ba7607d76daa944d1b52fe2bf20792f2dc24df762372b7c25ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f95e8c46d330a46bccff4299a2cc5d753d2371a1889cddbcdc70f7da571915a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.172424 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a8f87b8159488138c04574a665b2ae3f16031f6fc7791117aace5c6d4111b2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 
21:19:13.197429 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.197957 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.198060 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.198158 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.198280 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:13Z","lastTransitionTime":"2025-12-08T21:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.301975 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.302027 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.302036 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.302053 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.302065 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:13Z","lastTransitionTime":"2025-12-08T21:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.404683 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.404810 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.404836 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.404872 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.404895 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:13Z","lastTransitionTime":"2025-12-08T21:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.508323 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.508382 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.508404 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.508431 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.508448 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:13Z","lastTransitionTime":"2025-12-08T21:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.597090 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.597125 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:13 crc kubenswrapper[4791]: E1208 21:19:13.597235 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.597307 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:13 crc kubenswrapper[4791]: E1208 21:19:13.597576 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:19:13 crc kubenswrapper[4791]: E1208 21:19:13.598091 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.613154 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a515231e-0c05-47c4-8731-6fdc0792b2c0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c27269d9eb25f7d681a2c653a09d62264d8fc2cd8500eb2c2037d1ef512bb852\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1bfa0139619e85c94a473c25d6f7163c2bd700b172b828d7decfc3a14baf4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52d9c79fb2719ba7607d76daa944d1b52fe2bf20792f2dc24df762372b7c25ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/k
ubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f95e8c46d330a46bccff4299a2cc5d753d2371a1889cddbcdc70f7da571915a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.613592 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.613643 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.613657 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.613678 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.613691 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:13Z","lastTransitionTime":"2025-12-08T21:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.629564 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a8f87b8159488138c04574a665b2ae3f16031f6fc7791117aace5c6d4111b2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.655296 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc451ac-cac3-4571-9ef2-8b22b3e20c32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a560d56b37dcaf5f26a1c58f78c02dd0f5fd9b222e696c15474acd25eba3ab2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c57e43dc171b23fa1abb6f3baa41f8dfee855b0e85afc8a233c7d9ead4706301\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://870672685b3bbcda0af97fd9af6d3f40fb9c9d57432f0736e64ea96a21ff1b34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://468f39d9ddd6d74f616a2559f2204a65336bbb7
13c83c64ff511fd4126d30bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac2c3ef5a41d743dba41d84bbe4e89b64d8702b2d4565982be17fae194312abf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d57b4b60d9c35eb18a5976c42d5f38e0c8dfdb9c04f4e27585f00e57c3f27f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d57b4b60d9c35eb18a5976c42d5f38e0c8dfdb9c04f4e27585f00e57c3f27f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ee716248fcf96836fa6fca32e47c04270b384be440d725e5083bf72fd897122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ee716248fcf96836fa6fca32e47c04270b384be440d725e5083bf72fd897122\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a8047c3513de0746992a77b9867a3a8709bfe81592ef9ca2483a753a0862fefe\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8047c3513de0746992a77b9867a3a8709bfe81592ef9ca2483a753a0862fefe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.666445 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d7d5b0-bf26-4221-9933-1f2af688750f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4787a6a226550bb0d96b1b8af8059a6d349d659bad8718ef1df246a66b1e0ac2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b454a2e59b4533af556e4f5175b49175c7b4656861477cec02573e09f27b28\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-
apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20ffa577a6292eb443cd5673c9d3a3833627cd38e63b153db5627047dc57b8f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad31c2445a3c19bedfc0d319262fdcc11fe2ba317881450f100cd4e67e2355f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153324bc4f35dd06b193de5ef89462ab658a50a1384b9eb6f5b3b5ccf0b2018\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\
\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.676284 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.686482 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.695952 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-np52c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a943687-2f86-4422-854f-ab38b351b8c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j87l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-np52c\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.709044 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-49gdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd95c042-30cb-438f-8e98-9aebe3ea93bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f25965cfe0a07212e4d159d38b276988beeef898ceacdffa62335cf03e0eb55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-94fd8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-49gdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.717033 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8mkcc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"006ee595-d920-4e96-9c93-9283cf84a4de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16239b36ef8239352cd9c9e2516f19638c8b567c35634ce043a4e0e41d29d1c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q66px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:07Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8mkcc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.718000 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.718033 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:13 crc 
kubenswrapper[4791]: I1208 21:19:13.718045 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.718064 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.718079 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:13Z","lastTransitionTime":"2025-12-08T21:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.730084 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.742647 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.754121 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.763882 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a898dfe85305f9e755a0fce9f420825f50668f1491e9b6da752649c02746ad3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sx4hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c1cbce001d702f40825e1e2c6c77ce54111ab2097acbc1cc3d365fd3d7b06cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2
025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sx4hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgd9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.778694 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bpjxc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73955741-20a9-4a15-808b-c72dafba6dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b439c3680d0cbf84563f52f8a16a1a482361ca110de7dc537c64b494b994c27d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f928dbb347322a7929cb5102328b4879bc66bb53d9eacc0dc05eda071a6b66f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o:
//f928dbb347322a7929cb5102328b4879bc66bb53d9eacc0dc05eda071a6b66f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1055c8dd1dd1126bb9e1ee83fdca69cdfa1052be3d1674d53dec324bb08bcc1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1055c8dd1dd1126bb9e1ee83fdca69cdfa1052be3d1674d53dec324bb08bcc1e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c691b6fa2f57e004b1bb400e58bb3aa499612ce8bca27a2bf2dd2f45454e8c30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c691b6fa2f57e004b1bb400e58bb3aa499612ce8bca27a2bf2dd2f45454e8c30\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f4802030acccd617cb94a73400f6fff4fc8276f93dec
d2ddb9c820299afb4ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f4802030acccd617cb94a73400f6fff4fc8276f93decd2ddb9c820299afb4ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2098190deaa3e3141de083af3f6204543b734baa5328cf569b484d9b093e17d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2098190deaa3e3141de083af3f6204543b734baa5328cf569b484d9b093e17d1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://556df10f44cb01b0ecb1266965fed9e1ffaed75bbc38e87f8ae7e88de5be1184\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://556df10f44cb01b0ecb1266965fed9e1ffaed75bbc38e87f8ae7e88de5be1184\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serv
iceaccount\\\",\\\"name\\\":\\\"kube-api-access-lp7z7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bpjxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.800592 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92ac0f9db30045f8ac6e2997ecb935634f31f5ee0cbb2da74e87c344df3ed125\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22cca113e278abf9667c373147d1bf61b951ae07e7e1046e13ebf28aa0355007\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fe5fcb37182b9302ac277ae161589f867ee9126114052e88a82048e319595c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b83e290f40edffd1d7679ef280a32bff978b35e9b0fcd1f179d3707021ba3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37c2a697912d045e363b17699528d1f17acd50d3a0883e40b9023d72f3cf5e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ea79a956f5bc31b744c699f976d260048fa6d0c69e5d96cc3e832c697a796ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edf0bc928bbf2c2486d9351ffca88f382b570552
25c9ddb099d15cc38fc12462\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aa9dedf617d8d5c7f71b58eb379feceb649024dfd1ba3711088afbb5b873936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7938544ef5c904b0a752cb91848ba48897df503bee8c19a431137328620c351e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7938544ef5c904b0a752cb91848ba48897df503bee8c19a431137328620c351e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfx26\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dk8tz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.821065 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.821095 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.821105 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.821123 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.821134 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:13Z","lastTransitionTime":"2025-12-08T21:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.881569 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"8e35df7b10957a7b55658d4bee09eec359ccd9100b4ef6c798bf5248c92a187b"} Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.882071 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"1f261cb80e7739a2b9e13803d2d5ca26eca08b1aae220bc317f7e7bb613e69fb"} Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.883821 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-np52c" event={"ID":"0a943687-2f86-4422-854f-ab38b351b8c1","Type":"ContainerStarted","Data":"d29d05b797de7e8ca0ebd01b8d853be0e4cdd647864c65459d40c3abd976ff8c"} Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.903025 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc451ac-cac3-4571-9ef2-8b22b3e20c32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a560d56b37dcaf5f26a1c58f78c02dd0f5fd9b222e696c15474acd25eba3ab2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c57e43dc171b23fa1abb6f3baa41f8dfee855b0e85afc8a233c7d9ead4706301\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state
\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://870672685b3bbcda0af97fd9af6d3f40fb9c9d57432f0736e64ea96a21ff1b34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://468f39d9ddd6d74f616a2559f2204a65336bbb713c83c64ff511fd4126d30bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac2c3ef5a41d743dba41d84bbe4e89b64d8702b2d4565982be17fae194312abf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d57b4b60d9c35eb18a5976c42d5f38e0c8dfdb9c04f4e27585f00e57c3f27f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d57b4b60d9
c35eb18a5976c42d5f38e0c8dfdb9c04f4e27585f00e57c3f27f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ee716248fcf96836fa6fca32e47c04270b384be440d725e5083bf72fd897122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ee716248fcf96836fa6fca32e47c04270b384be440d725e5083bf72fd897122\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a8047c3513de0746992a77b9867a3a8709bfe81592ef9ca2483a753a0862fefe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8047c3513de0746992a77b9867a3a8709bfe81592ef9ca2483a753a0862fefe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.914548 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d7d5b0-bf26-4221-9933-1f2af688750f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4787a6a226550bb0d96b1b8af8059a6d349d659bad8718ef1df246a66b1e0ac2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b454a2e59b4533af556e4f5175b49175c7b4656861477cec02573e09f27b28\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20ffa577a6292eb443cd5673c9d3a3833627cd38e63b153db5627047dc57b8f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad31c2445a3c19bedfc0d319262fdcc11fe2ba317881450f100cd4e67e2355f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153324bc4f35dd06b193de5ef89462ab658a50a1384b9eb6f5b3b5ccf0b2018\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.923551 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.924890 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.924952 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.924965 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.924986 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.924998 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:13Z","lastTransitionTime":"2025-12-08T21:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.934182 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.942142 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-np52c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a943687-2f86-4422-854f-ab38b351b8c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j87l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-np52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.952449 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-49gdc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd95c042-30cb-438f-8e98-9aebe3ea93bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f25965cfe0a07212e4d159d38b276988beeef898ceacdffa62335cf03e0eb55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-94fd8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-49gdc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.963589 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.973241 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e35df7b10957a7b55658d4bee09eec359ccd9100b4ef6c798bf5248c92a187b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f261cb80e7739a2b9e13803d2d5ca26eca08b1aae220bc317f7e7bb613e69fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:13 crc kubenswrapper[4791]: I1208 21:19:13.981523 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.016488 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podStartSLOduration=12.016466721 podStartE2EDuration="12.016466721s" podCreationTimestamp="2025-12-08 21:19:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:13.999525323 +0000 UTC m=+30.698283668" watchObservedRunningTime="2025-12-08 21:19:14.016466721 +0000 UTC m=+30.715225066" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.026927 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.026949 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.026957 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.026968 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.026977 4791 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:14Z","lastTransitionTime":"2025-12-08T21:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.037021 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-bpjxc" podStartSLOduration=12.037000994 podStartE2EDuration="12.037000994s" podCreationTimestamp="2025-12-08 21:19:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:14.016869681 +0000 UTC m=+30.715628026" watchObservedRunningTime="2025-12-08 21:19:14.037000994 +0000 UTC m=+30.735759339" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.051692 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-8mkcc" podStartSLOduration=11.051674668 podStartE2EDuration="11.051674668s" podCreationTimestamp="2025-12-08 21:19:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:14.05131634 +0000 UTC m=+30.750074705" watchObservedRunningTime="2025-12-08 21:19:14.051674668 +0000 UTC m=+30.750433013" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.051878 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" podStartSLOduration=12.051874353 podStartE2EDuration="12.051874353s" podCreationTimestamp="2025-12-08 21:19:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:14.037444724 +0000 UTC m=+30.736203079" watchObservedRunningTime="2025-12-08 21:19:14.051874353 +0000 UTC m=+30.750632698" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.064761 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=12.064740635 podStartE2EDuration="12.064740635s" podCreationTimestamp="2025-12-08 21:19:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:14.064681994 +0000 UTC m=+30.763440349" watchObservedRunningTime="2025-12-08 21:19:14.064740635 +0000 UTC m=+30.763498980" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.095310 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-49gdc" podStartSLOduration=12.095286882 podStartE2EDuration="12.095286882s" podCreationTimestamp="2025-12-08 21:19:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:14.094543965 +0000 UTC m=+30.793302320" watchObservedRunningTime="2025-12-08 21:19:14.095286882 +0000 UTC m=+30.794045237" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.102793 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dn8tt"] Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.103174 4791 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dn8tt" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.106886 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.108437 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.130001 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.130376 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.130400 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.130427 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.130461 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:14Z","lastTransitionTime":"2025-12-08T21:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.139878 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=11.139847279 podStartE2EDuration="11.139847279s" podCreationTimestamp="2025-12-08 21:19:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:14.132132248 +0000 UTC m=+30.830890623" watchObservedRunningTime="2025-12-08 21:19:14.139847279 +0000 UTC m=+30.838605624" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.140828 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-2nxp5"] Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.141449 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2nxp5" Dec 08 21:19:14 crc kubenswrapper[4791]: E1208 21:19:14.141539 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2nxp5" podUID="295d8719-2569-4b02-a1a7-3dd2a2b119a8" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.156330 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=13.156306676 podStartE2EDuration="13.156306676s" podCreationTimestamp="2025-12-08 21:19:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:14.155133968 +0000 UTC m=+30.853892313" watchObservedRunningTime="2025-12-08 21:19:14.156306676 +0000 UTC m=+30.855065021" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.200254 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3029a833-0233-44f8-aa63-554106e1e865-env-overrides\") pod \"ovnkube-control-plane-749d76644c-dn8tt\" (UID: \"3029a833-0233-44f8-aa63-554106e1e865\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dn8tt" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.200315 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/295d8719-2569-4b02-a1a7-3dd2a2b119a8-metrics-certs\") pod \"network-metrics-daemon-2nxp5\" (UID: \"295d8719-2569-4b02-a1a7-3dd2a2b119a8\") " pod="openshift-multus/network-metrics-daemon-2nxp5" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.200333 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jbmf5\" (UniqueName: \"kubernetes.io/projected/3029a833-0233-44f8-aa63-554106e1e865-kube-api-access-jbmf5\") pod \"ovnkube-control-plane-749d76644c-dn8tt\" (UID: \"3029a833-0233-44f8-aa63-554106e1e865\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dn8tt" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.200454 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hmt5n\" (UniqueName: \"kubernetes.io/projected/295d8719-2569-4b02-a1a7-3dd2a2b119a8-kube-api-access-hmt5n\") pod \"network-metrics-daemon-2nxp5\" (UID: \"295d8719-2569-4b02-a1a7-3dd2a2b119a8\") " pod="openshift-multus/network-metrics-daemon-2nxp5" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.200517 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3029a833-0233-44f8-aa63-554106e1e865-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-dn8tt\" (UID: \"3029a833-0233-44f8-aa63-554106e1e865\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dn8tt" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.200571 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3029a833-0233-44f8-aa63-554106e1e865-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-dn8tt\" (UID: \"3029a833-0233-44f8-aa63-554106e1e865\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dn8tt" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.209984 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-np52c" podStartSLOduration=12.209947115 
podStartE2EDuration="12.209947115s" podCreationTimestamp="2025-12-08 21:19:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:14.196364196 +0000 UTC m=+30.895122571" watchObservedRunningTime="2025-12-08 21:19:14.209947115 +0000 UTC m=+30.908705480" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.232927 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.232984 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.233000 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.233024 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.233039 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:14Z","lastTransitionTime":"2025-12-08T21:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.301795 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3029a833-0233-44f8-aa63-554106e1e865-env-overrides\") pod \"ovnkube-control-plane-749d76644c-dn8tt\" (UID: \"3029a833-0233-44f8-aa63-554106e1e865\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dn8tt" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.301893 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/295d8719-2569-4b02-a1a7-3dd2a2b119a8-metrics-certs\") pod \"network-metrics-daemon-2nxp5\" (UID: \"295d8719-2569-4b02-a1a7-3dd2a2b119a8\") " pod="openshift-multus/network-metrics-daemon-2nxp5" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.301939 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jbmf5\" (UniqueName: \"kubernetes.io/projected/3029a833-0233-44f8-aa63-554106e1e865-kube-api-access-jbmf5\") pod \"ovnkube-control-plane-749d76644c-dn8tt\" (UID: \"3029a833-0233-44f8-aa63-554106e1e865\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dn8tt" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.301977 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hmt5n\" (UniqueName: \"kubernetes.io/projected/295d8719-2569-4b02-a1a7-3dd2a2b119a8-kube-api-access-hmt5n\") pod \"network-metrics-daemon-2nxp5\" (UID: \"295d8719-2569-4b02-a1a7-3dd2a2b119a8\") " pod="openshift-multus/network-metrics-daemon-2nxp5" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.302014 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3029a833-0233-44f8-aa63-554106e1e865-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-dn8tt\" (UID: \"3029a833-0233-44f8-aa63-554106e1e865\") " 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dn8tt" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.302063 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3029a833-0233-44f8-aa63-554106e1e865-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-dn8tt\" (UID: \"3029a833-0233-44f8-aa63-554106e1e865\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dn8tt" Dec 08 21:19:14 crc kubenswrapper[4791]: E1208 21:19:14.302081 4791 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 08 21:19:14 crc kubenswrapper[4791]: E1208 21:19:14.302272 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/295d8719-2569-4b02-a1a7-3dd2a2b119a8-metrics-certs podName:295d8719-2569-4b02-a1a7-3dd2a2b119a8 nodeName:}" failed. No retries permitted until 2025-12-08 21:19:14.802243153 +0000 UTC m=+31.501001698 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/295d8719-2569-4b02-a1a7-3dd2a2b119a8-metrics-certs") pod "network-metrics-daemon-2nxp5" (UID: "295d8719-2569-4b02-a1a7-3dd2a2b119a8") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.303062 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3029a833-0233-44f8-aa63-554106e1e865-env-overrides\") pod \"ovnkube-control-plane-749d76644c-dn8tt\" (UID: \"3029a833-0233-44f8-aa63-554106e1e865\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dn8tt" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.303441 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3029a833-0233-44f8-aa63-554106e1e865-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-dn8tt\" (UID: \"3029a833-0233-44f8-aa63-554106e1e865\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dn8tt" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.319913 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3029a833-0233-44f8-aa63-554106e1e865-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-dn8tt\" (UID: \"3029a833-0233-44f8-aa63-554106e1e865\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dn8tt" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.324434 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hmt5n\" (UniqueName: \"kubernetes.io/projected/295d8719-2569-4b02-a1a7-3dd2a2b119a8-kube-api-access-hmt5n\") pod \"network-metrics-daemon-2nxp5\" (UID: \"295d8719-2569-4b02-a1a7-3dd2a2b119a8\") " pod="openshift-multus/network-metrics-daemon-2nxp5" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.325351 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jbmf5\" (UniqueName: \"kubernetes.io/projected/3029a833-0233-44f8-aa63-554106e1e865-kube-api-access-jbmf5\") pod \"ovnkube-control-plane-749d76644c-dn8tt\" (UID: \"3029a833-0233-44f8-aa63-554106e1e865\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dn8tt" Dec 08 21:19:14 crc kubenswrapper[4791]: 
I1208 21:19:14.330133 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.336321 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.336368 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.336381 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.336402 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.336414 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:14Z","lastTransitionTime":"2025-12-08T21:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.418955 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dn8tt" Dec 08 21:19:14 crc kubenswrapper[4791]: W1208 21:19:14.433626 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3029a833_0233_44f8_aa63_554106e1e865.slice/crio-1ab5d17f3a16e4f5828fdbbef83c08f8d2863b26a4378ebcf33bb2609153ab9b WatchSource:0}: Error finding container 1ab5d17f3a16e4f5828fdbbef83c08f8d2863b26a4378ebcf33bb2609153ab9b: Status 404 returned error can't find the container with id 1ab5d17f3a16e4f5828fdbbef83c08f8d2863b26a4378ebcf33bb2609153ab9b Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.439320 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.439350 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.439361 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.439381 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.439393 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:14Z","lastTransitionTime":"2025-12-08T21:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.542560 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.542605 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.542615 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.542630 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.542640 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:14Z","lastTransitionTime":"2025-12-08T21:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.645573 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.645611 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.645620 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.645645 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.645656 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:14Z","lastTransitionTime":"2025-12-08T21:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.749475 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.749514 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.749523 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.749540 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.749552 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:14Z","lastTransitionTime":"2025-12-08T21:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.806937 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/295d8719-2569-4b02-a1a7-3dd2a2b119a8-metrics-certs\") pod \"network-metrics-daemon-2nxp5\" (UID: \"295d8719-2569-4b02-a1a7-3dd2a2b119a8\") " pod="openshift-multus/network-metrics-daemon-2nxp5" Dec 08 21:19:14 crc kubenswrapper[4791]: E1208 21:19:14.807250 4791 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 08 21:19:14 crc kubenswrapper[4791]: E1208 21:19:14.807391 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/295d8719-2569-4b02-a1a7-3dd2a2b119a8-metrics-certs podName:295d8719-2569-4b02-a1a7-3dd2a2b119a8 nodeName:}" failed. No retries permitted until 2025-12-08 21:19:15.807355046 +0000 UTC m=+32.506113391 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/295d8719-2569-4b02-a1a7-3dd2a2b119a8-metrics-certs") pod "network-metrics-daemon-2nxp5" (UID: "295d8719-2569-4b02-a1a7-3dd2a2b119a8") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.853152 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.853250 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.853271 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.853307 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.853331 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:14Z","lastTransitionTime":"2025-12-08T21:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.887937 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dn8tt" event={"ID":"3029a833-0233-44f8-aa63-554106e1e865","Type":"ContainerStarted","Data":"1ab5d17f3a16e4f5828fdbbef83c08f8d2863b26a4378ebcf33bb2609153ab9b"} Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.955629 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.955981 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.956077 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.956164 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:14 crc kubenswrapper[4791]: I1208 21:19:14.956240 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:14Z","lastTransitionTime":"2025-12-08T21:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.058469 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.058513 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.058522 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.058536 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.058548 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:15Z","lastTransitionTime":"2025-12-08T21:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.162441 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.162510 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.162526 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.162550 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.162567 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:15Z","lastTransitionTime":"2025-12-08T21:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.265030 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.265133 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.265152 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.265182 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.265205 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:15Z","lastTransitionTime":"2025-12-08T21:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.368411 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.368483 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.368504 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.368542 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.368564 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:15Z","lastTransitionTime":"2025-12-08T21:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.473844 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.473910 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.473932 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.473955 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.473970 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:15Z","lastTransitionTime":"2025-12-08T21:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.577174 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.577233 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.577246 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.577274 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.577289 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:15Z","lastTransitionTime":"2025-12-08T21:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.597865 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.597928 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.597994 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.598099 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2nxp5" Dec 08 21:19:15 crc kubenswrapper[4791]: E1208 21:19:15.598099 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:19:15 crc kubenswrapper[4791]: E1208 21:19:15.598240 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:19:15 crc kubenswrapper[4791]: E1208 21:19:15.598358 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2nxp5" podUID="295d8719-2569-4b02-a1a7-3dd2a2b119a8" Dec 08 21:19:15 crc kubenswrapper[4791]: E1208 21:19:15.598457 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.681197 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.681293 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.681321 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.681356 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.681386 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:15Z","lastTransitionTime":"2025-12-08T21:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.784609 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.784670 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.784692 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.784847 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.784871 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:15Z","lastTransitionTime":"2025-12-08T21:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.817378 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/295d8719-2569-4b02-a1a7-3dd2a2b119a8-metrics-certs\") pod \"network-metrics-daemon-2nxp5\" (UID: \"295d8719-2569-4b02-a1a7-3dd2a2b119a8\") " pod="openshift-multus/network-metrics-daemon-2nxp5" Dec 08 21:19:15 crc kubenswrapper[4791]: E1208 21:19:15.817554 4791 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 08 21:19:15 crc kubenswrapper[4791]: E1208 21:19:15.817625 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/295d8719-2569-4b02-a1a7-3dd2a2b119a8-metrics-certs podName:295d8719-2569-4b02-a1a7-3dd2a2b119a8 nodeName:}" failed. No retries permitted until 2025-12-08 21:19:17.817604682 +0000 UTC m=+34.516363027 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/295d8719-2569-4b02-a1a7-3dd2a2b119a8-metrics-certs") pod "network-metrics-daemon-2nxp5" (UID: "295d8719-2569-4b02-a1a7-3dd2a2b119a8") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.887674 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.887752 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.887769 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.887789 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.887805 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:15Z","lastTransitionTime":"2025-12-08T21:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.893187 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dn8tt" event={"ID":"3029a833-0233-44f8-aa63-554106e1e865","Type":"ContainerStarted","Data":"9cb2851b0b45129bf7209c0bc490488855a6474fb7786f5f4107a06eba33ae3b"} Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.893234 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dn8tt" event={"ID":"3029a833-0233-44f8-aa63-554106e1e865","Type":"ContainerStarted","Data":"ada1373c493c05e24e8265a585d1155f84a3b4e10c00d77f80c7ae47143a32a2"} Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.915985 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dn8tt" podStartSLOduration=12.915960192 podStartE2EDuration="12.915960192s" podCreationTimestamp="2025-12-08 21:19:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:15.914024407 +0000 UTC m=+32.612782772" watchObservedRunningTime="2025-12-08 21:19:15.915960192 +0000 UTC m=+32.614718567" Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.990441 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.990495 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.990507 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.990524 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:15 crc kubenswrapper[4791]: I1208 21:19:15.990536 4791 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:15Z","lastTransitionTime":"2025-12-08T21:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.093989 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.094041 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.094054 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.094072 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.094084 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:16Z","lastTransitionTime":"2025-12-08T21:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.109782 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-2nxp5"] Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.109939 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2nxp5" Dec 08 21:19:16 crc kubenswrapper[4791]: E1208 21:19:16.110060 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2nxp5" podUID="295d8719-2569-4b02-a1a7-3dd2a2b119a8" Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.197078 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.197440 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.197451 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.197467 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.197476 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:16Z","lastTransitionTime":"2025-12-08T21:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.299802 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.299851 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.299864 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.299882 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.299898 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:16Z","lastTransitionTime":"2025-12-08T21:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.403002 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.403047 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.403058 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.403074 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.403098 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:16Z","lastTransitionTime":"2025-12-08T21:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.506104 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.506154 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.506167 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.506186 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.506198 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:16Z","lastTransitionTime":"2025-12-08T21:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.609307 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.609363 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.609376 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.609396 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.609408 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:16Z","lastTransitionTime":"2025-12-08T21:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.712283 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.712369 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.712390 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.712418 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.712442 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:16Z","lastTransitionTime":"2025-12-08T21:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.815828 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.815866 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.815878 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.815894 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.815904 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:16Z","lastTransitionTime":"2025-12-08T21:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.891866 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.891912 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.891923 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.891941 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.891952 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:16Z","lastTransitionTime":"2025-12-08T21:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.947667 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-g7dzq"] Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.948206 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g7dzq" Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.950682 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.950892 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.951057 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 08 21:19:16 crc kubenswrapper[4791]: I1208 21:19:16.951187 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 08 21:19:17 crc kubenswrapper[4791]: I1208 21:19:17.033533 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5acc1e38-8b00-4416-b2c1-3fe639b73db9-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-g7dzq\" (UID: \"5acc1e38-8b00-4416-b2c1-3fe639b73db9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g7dzq" Dec 08 21:19:17 crc kubenswrapper[4791]: I1208 21:19:17.033611 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/5acc1e38-8b00-4416-b2c1-3fe639b73db9-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-g7dzq\" (UID: \"5acc1e38-8b00-4416-b2c1-3fe639b73db9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g7dzq" Dec 08 21:19:17 crc kubenswrapper[4791]: I1208 21:19:17.033647 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: 
\"kubernetes.io/host-path/5acc1e38-8b00-4416-b2c1-3fe639b73db9-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-g7dzq\" (UID: \"5acc1e38-8b00-4416-b2c1-3fe639b73db9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g7dzq" Dec 08 21:19:17 crc kubenswrapper[4791]: I1208 21:19:17.033742 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5acc1e38-8b00-4416-b2c1-3fe639b73db9-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-g7dzq\" (UID: \"5acc1e38-8b00-4416-b2c1-3fe639b73db9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g7dzq" Dec 08 21:19:17 crc kubenswrapper[4791]: I1208 21:19:17.033793 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/5acc1e38-8b00-4416-b2c1-3fe639b73db9-service-ca\") pod \"cluster-version-operator-5c965bbfc6-g7dzq\" (UID: \"5acc1e38-8b00-4416-b2c1-3fe639b73db9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g7dzq" Dec 08 21:19:17 crc kubenswrapper[4791]: I1208 21:19:17.134787 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/5acc1e38-8b00-4416-b2c1-3fe639b73db9-service-ca\") pod \"cluster-version-operator-5c965bbfc6-g7dzq\" (UID: \"5acc1e38-8b00-4416-b2c1-3fe639b73db9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g7dzq" Dec 08 21:19:17 crc kubenswrapper[4791]: I1208 21:19:17.134941 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5acc1e38-8b00-4416-b2c1-3fe639b73db9-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-g7dzq\" (UID: \"5acc1e38-8b00-4416-b2c1-3fe639b73db9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g7dzq" Dec 08 21:19:17 crc kubenswrapper[4791]: I1208 21:19:17.135004 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/5acc1e38-8b00-4416-b2c1-3fe639b73db9-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-g7dzq\" (UID: \"5acc1e38-8b00-4416-b2c1-3fe639b73db9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g7dzq" Dec 08 21:19:17 crc kubenswrapper[4791]: I1208 21:19:17.135053 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/5acc1e38-8b00-4416-b2c1-3fe639b73db9-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-g7dzq\" (UID: \"5acc1e38-8b00-4416-b2c1-3fe639b73db9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g7dzq" Dec 08 21:19:17 crc kubenswrapper[4791]: I1208 21:19:17.135099 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5acc1e38-8b00-4416-b2c1-3fe639b73db9-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-g7dzq\" (UID: \"5acc1e38-8b00-4416-b2c1-3fe639b73db9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g7dzq" Dec 08 21:19:17 crc kubenswrapper[4791]: I1208 21:19:17.135612 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: 
\"kubernetes.io/host-path/5acc1e38-8b00-4416-b2c1-3fe639b73db9-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-g7dzq\" (UID: \"5acc1e38-8b00-4416-b2c1-3fe639b73db9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g7dzq" Dec 08 21:19:17 crc kubenswrapper[4791]: I1208 21:19:17.135912 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/5acc1e38-8b00-4416-b2c1-3fe639b73db9-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-g7dzq\" (UID: \"5acc1e38-8b00-4416-b2c1-3fe639b73db9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g7dzq" Dec 08 21:19:17 crc kubenswrapper[4791]: I1208 21:19:17.137817 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/5acc1e38-8b00-4416-b2c1-3fe639b73db9-service-ca\") pod \"cluster-version-operator-5c965bbfc6-g7dzq\" (UID: \"5acc1e38-8b00-4416-b2c1-3fe639b73db9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g7dzq" Dec 08 21:19:17 crc kubenswrapper[4791]: I1208 21:19:17.143298 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5acc1e38-8b00-4416-b2c1-3fe639b73db9-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-g7dzq\" (UID: \"5acc1e38-8b00-4416-b2c1-3fe639b73db9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g7dzq" Dec 08 21:19:17 crc kubenswrapper[4791]: I1208 21:19:17.169982 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5acc1e38-8b00-4416-b2c1-3fe639b73db9-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-g7dzq\" (UID: \"5acc1e38-8b00-4416-b2c1-3fe639b73db9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g7dzq" Dec 08 21:19:17 crc kubenswrapper[4791]: I1208 21:19:17.263166 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g7dzq" Dec 08 21:19:17 crc kubenswrapper[4791]: W1208 21:19:17.281145 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5acc1e38_8b00_4416_b2c1_3fe639b73db9.slice/crio-b6727087ce6b88e7caed82c2b31603a59e5ffe944bbe110b85a8f1f4b2feea9d WatchSource:0}: Error finding container b6727087ce6b88e7caed82c2b31603a59e5ffe944bbe110b85a8f1f4b2feea9d: Status 404 returned error can't find the container with id b6727087ce6b88e7caed82c2b31603a59e5ffe944bbe110b85a8f1f4b2feea9d Dec 08 21:19:17 crc kubenswrapper[4791]: I1208 21:19:17.597852 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2nxp5" Dec 08 21:19:17 crc kubenswrapper[4791]: I1208 21:19:17.597852 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:17 crc kubenswrapper[4791]: E1208 21:19:17.598121 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2nxp5" podUID="295d8719-2569-4b02-a1a7-3dd2a2b119a8" Dec 08 21:19:17 crc kubenswrapper[4791]: I1208 21:19:17.597850 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:17 crc kubenswrapper[4791]: I1208 21:19:17.597891 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:17 crc kubenswrapper[4791]: E1208 21:19:17.599112 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:19:17 crc kubenswrapper[4791]: E1208 21:19:17.599246 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:19:17 crc kubenswrapper[4791]: E1208 21:19:17.599294 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:19:17 crc kubenswrapper[4791]: I1208 21:19:17.845196 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/295d8719-2569-4b02-a1a7-3dd2a2b119a8-metrics-certs\") pod \"network-metrics-daemon-2nxp5\" (UID: \"295d8719-2569-4b02-a1a7-3dd2a2b119a8\") " pod="openshift-multus/network-metrics-daemon-2nxp5" Dec 08 21:19:17 crc kubenswrapper[4791]: E1208 21:19:17.845965 4791 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 08 21:19:17 crc kubenswrapper[4791]: E1208 21:19:17.846077 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/295d8719-2569-4b02-a1a7-3dd2a2b119a8-metrics-certs podName:295d8719-2569-4b02-a1a7-3dd2a2b119a8 nodeName:}" failed. No retries permitted until 2025-12-08 21:19:21.846057341 +0000 UTC m=+38.544815686 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/295d8719-2569-4b02-a1a7-3dd2a2b119a8-metrics-certs") pod "network-metrics-daemon-2nxp5" (UID: "295d8719-2569-4b02-a1a7-3dd2a2b119a8") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 08 21:19:17 crc kubenswrapper[4791]: I1208 21:19:17.902168 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g7dzq" event={"ID":"5acc1e38-8b00-4416-b2c1-3fe639b73db9","Type":"ContainerStarted","Data":"013eb87bc83a301c247515eef3b3a38825d6b381519a36f002411d924ce27642"} Dec 08 21:19:17 crc kubenswrapper[4791]: I1208 21:19:17.902220 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g7dzq" event={"ID":"5acc1e38-8b00-4416-b2c1-3fe639b73db9","Type":"ContainerStarted","Data":"b6727087ce6b88e7caed82c2b31603a59e5ffe944bbe110b85a8f1f4b2feea9d"} Dec 08 21:19:17 crc kubenswrapper[4791]: I1208 21:19:17.927660 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g7dzq" podStartSLOduration=15.927635407 podStartE2EDuration="15.927635407s" podCreationTimestamp="2025-12-08 21:19:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:17.926672354 +0000 UTC m=+34.625430699" watchObservedRunningTime="2025-12-08 21:19:17.927635407 +0000 UTC m=+34.626393762" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.048298 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:18 crc kubenswrapper[4791]: E1208 21:19:18.049077 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:34.049029758 +0000 UTC m=+50.747788103 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.150230 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.150285 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.150320 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.150360 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:18 crc kubenswrapper[4791]: E1208 21:19:18.150489 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 08 21:19:18 crc kubenswrapper[4791]: E1208 21:19:18.150523 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 08 21:19:18 crc kubenswrapper[4791]: E1208 21:19:18.150502 4791 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 08 21:19:18 crc kubenswrapper[4791]: E1208 21:19:18.150582 4791 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 08 21:19:18 crc kubenswrapper[4791]: E1208 21:19:18.150632 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-08 21:19:34.150612594 +0000 UTC m=+50.849370939 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 08 21:19:18 crc kubenswrapper[4791]: E1208 21:19:18.150538 4791 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:19:18 crc kubenswrapper[4791]: E1208 21:19:18.150693 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-08 21:19:34.150665505 +0000 UTC m=+50.849423860 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 08 21:19:18 crc kubenswrapper[4791]: E1208 21:19:18.150756 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-08 21:19:34.150730496 +0000 UTC m=+50.849488861 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:19:18 crc kubenswrapper[4791]: E1208 21:19:18.150500 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 08 21:19:18 crc kubenswrapper[4791]: E1208 21:19:18.150788 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 08 21:19:18 crc kubenswrapper[4791]: E1208 21:19:18.150797 4791 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:19:18 crc kubenswrapper[4791]: E1208 21:19:18.150824 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-08 21:19:34.150817278 +0000 UTC m=+50.849575623 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.383123 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.383262 4791 kubelet_node_status.go:538] "Fast updating node status as it just became ready" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.427924 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-kn6mg"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.428282 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-kn6mg" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.430083 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-7c86k"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.430426 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-7c86k" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.431958 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.431968 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.432170 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.432961 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-d259k"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.433280 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-d259k" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.442571 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.443105 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.444736 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.444922 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.445015 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.445096 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.445171 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.445281 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.445369 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.445451 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.445529 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-d99lw"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.445956 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-d99lw" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.446456 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-cbzwp"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.446655 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.446803 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.447444 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.447545 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-cbzwp" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.447986 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.449009 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4hfxg"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.449606 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-qf5jp"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.450030 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4hfxg" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.450179 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-gj9w7"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.450610 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qf5jp" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.451502 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gj9w7" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.453498 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.453585 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.453807 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-dmbnh"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.453989 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.454525 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7gsq"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.454981 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7gsq" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.455478 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-dmbnh" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.455512 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-vck6k"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.456448 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-vck6k" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.557475 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/3804c897-c904-44f5-b8a3-04ead3e93ac4-audit-policies\") pod \"apiserver-7bbb656c7d-gj9w7\" (UID: \"3804c897-c904-44f5-b8a3-04ead3e93ac4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gj9w7" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.557522 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/3804c897-c904-44f5-b8a3-04ead3e93ac4-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-gj9w7\" (UID: \"3804c897-c904-44f5-b8a3-04ead3e93ac4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gj9w7" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.557541 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/959a8eb5-7d1a-4e10-bfc4-c23a1223d38d-console-oauth-config\") pod \"console-f9d7485db-vck6k\" (UID: \"959a8eb5-7d1a-4e10-bfc4-c23a1223d38d\") " pod="openshift-console/console-f9d7485db-vck6k" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.557559 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/e273b49d-255d-434d-935a-38ba1a53c69a-etcd-serving-ca\") pod \"apiserver-76f77b778f-cbzwp\" (UID: \"e273b49d-255d-434d-935a-38ba1a53c69a\") " pod="openshift-apiserver/apiserver-76f77b778f-cbzwp" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.557574 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e273b49d-255d-434d-935a-38ba1a53c69a-trusted-ca-bundle\") pod \"apiserver-76f77b778f-cbzwp\" (UID: \"e273b49d-255d-434d-935a-38ba1a53c69a\") " pod="openshift-apiserver/apiserver-76f77b778f-cbzwp" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.557589 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8bd7cdf9-9085-4702-8a95-f3f445783066-client-ca\") pod \"controller-manager-879f6c89f-kn6mg\" (UID: \"8bd7cdf9-9085-4702-8a95-f3f445783066\") " pod="openshift-controller-manager/controller-manager-879f6c89f-kn6mg" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.557603 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/959a8eb5-7d1a-4e10-bfc4-c23a1223d38d-service-ca\") pod \"console-f9d7485db-vck6k\" (UID: \"959a8eb5-7d1a-4e10-bfc4-c23a1223d38d\") " pod="openshift-console/console-f9d7485db-vck6k" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.557617 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8c0d7a26-0742-4593-9192-c667b71c30fb-serving-cert\") pod \"openshift-config-operator-7777fb866f-d259k\" (UID: \"8c0d7a26-0742-4593-9192-c667b71c30fb\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-d259k" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.557633 4791 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/36861fd8-1693-4ac3-b70d-571c891ddca3-machine-approver-tls\") pod \"machine-approver-56656f9798-qf5jp\" (UID: \"36861fd8-1693-4ac3-b70d-571c891ddca3\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qf5jp" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.557648 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7j74c\" (UniqueName: \"kubernetes.io/projected/36861fd8-1693-4ac3-b70d-571c891ddca3-kube-api-access-7j74c\") pod \"machine-approver-56656f9798-qf5jp\" (UID: \"36861fd8-1693-4ac3-b70d-571c891ddca3\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qf5jp" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.557664 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e273b49d-255d-434d-935a-38ba1a53c69a-serving-cert\") pod \"apiserver-76f77b778f-cbzwp\" (UID: \"e273b49d-255d-434d-935a-38ba1a53c69a\") " pod="openshift-apiserver/apiserver-76f77b778f-cbzwp" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.557677 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8bd7cdf9-9085-4702-8a95-f3f445783066-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-kn6mg\" (UID: \"8bd7cdf9-9085-4702-8a95-f3f445783066\") " pod="openshift-controller-manager/controller-manager-879f6c89f-kn6mg" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.557692 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/959a8eb5-7d1a-4e10-bfc4-c23a1223d38d-console-serving-cert\") pod \"console-f9d7485db-vck6k\" (UID: \"959a8eb5-7d1a-4e10-bfc4-c23a1223d38d\") " pod="openshift-console/console-f9d7485db-vck6k" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.557722 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c705ea37-240a-4fd8-9779-98bff52678ca-serving-cert\") pod \"route-controller-manager-6576b87f9c-p7gsq\" (UID: \"c705ea37-240a-4fd8-9779-98bff52678ca\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7gsq" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.557749 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/36861fd8-1693-4ac3-b70d-571c891ddca3-auth-proxy-config\") pod \"machine-approver-56656f9798-qf5jp\" (UID: \"36861fd8-1693-4ac3-b70d-571c891ddca3\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qf5jp" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.557764 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e273b49d-255d-434d-935a-38ba1a53c69a-config\") pod \"apiserver-76f77b778f-cbzwp\" (UID: \"e273b49d-255d-434d-935a-38ba1a53c69a\") " pod="openshift-apiserver/apiserver-76f77b778f-cbzwp" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.557779 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"etcd-client\" (UniqueName: \"kubernetes.io/secret/e273b49d-255d-434d-935a-38ba1a53c69a-etcd-client\") pod \"apiserver-76f77b778f-cbzwp\" (UID: \"e273b49d-255d-434d-935a-38ba1a53c69a\") " pod="openshift-apiserver/apiserver-76f77b778f-cbzwp" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.557806 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d5c2d385-390f-4d6f-8243-6399bd793167-config\") pod \"console-operator-58897d9998-dmbnh\" (UID: \"d5c2d385-390f-4d6f-8243-6399bd793167\") " pod="openshift-console-operator/console-operator-58897d9998-dmbnh" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.557822 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d5c2d385-390f-4d6f-8243-6399bd793167-trusted-ca\") pod \"console-operator-58897d9998-dmbnh\" (UID: \"d5c2d385-390f-4d6f-8243-6399bd793167\") " pod="openshift-console-operator/console-operator-58897d9998-dmbnh" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.557836 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/10e8b274-a7ca-4b48-b5f5-7345a78cd074-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-7c86k\" (UID: \"10e8b274-a7ca-4b48-b5f5-7345a78cd074\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7c86k" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.557850 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/8c0d7a26-0742-4593-9192-c667b71c30fb-available-featuregates\") pod \"openshift-config-operator-7777fb866f-d259k\" (UID: \"8c0d7a26-0742-4593-9192-c667b71c30fb\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-d259k" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.557877 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/959a8eb5-7d1a-4e10-bfc4-c23a1223d38d-trusted-ca-bundle\") pod \"console-f9d7485db-vck6k\" (UID: \"959a8eb5-7d1a-4e10-bfc4-c23a1223d38d\") " pod="openshift-console/console-f9d7485db-vck6k" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.557892 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sds94\" (UniqueName: \"kubernetes.io/projected/3804c897-c904-44f5-b8a3-04ead3e93ac4-kube-api-access-sds94\") pod \"apiserver-7bbb656c7d-gj9w7\" (UID: \"3804c897-c904-44f5-b8a3-04ead3e93ac4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gj9w7" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.557907 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8bd7cdf9-9085-4702-8a95-f3f445783066-serving-cert\") pod \"controller-manager-879f6c89f-kn6mg\" (UID: \"8bd7cdf9-9085-4702-8a95-f3f445783066\") " pod="openshift-controller-manager/controller-manager-879f6c89f-kn6mg" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.557925 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/e273b49d-255d-434d-935a-38ba1a53c69a-audit\") pod 
\"apiserver-76f77b778f-cbzwp\" (UID: \"e273b49d-255d-434d-935a-38ba1a53c69a\") " pod="openshift-apiserver/apiserver-76f77b778f-cbzwp" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.557940 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzrxc\" (UniqueName: \"kubernetes.io/projected/e273b49d-255d-434d-935a-38ba1a53c69a-kube-api-access-pzrxc\") pod \"apiserver-76f77b778f-cbzwp\" (UID: \"e273b49d-255d-434d-935a-38ba1a53c69a\") " pod="openshift-apiserver/apiserver-76f77b778f-cbzwp" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.557955 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3804c897-c904-44f5-b8a3-04ead3e93ac4-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-gj9w7\" (UID: \"3804c897-c904-44f5-b8a3-04ead3e93ac4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gj9w7" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.557970 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/36861fd8-1693-4ac3-b70d-571c891ddca3-config\") pod \"machine-approver-56656f9798-qf5jp\" (UID: \"36861fd8-1693-4ac3-b70d-571c891ddca3\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qf5jp" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.557983 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/e273b49d-255d-434d-935a-38ba1a53c69a-encryption-config\") pod \"apiserver-76f77b778f-cbzwp\" (UID: \"e273b49d-255d-434d-935a-38ba1a53c69a\") " pod="openshift-apiserver/apiserver-76f77b778f-cbzwp" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.558004 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/e273b49d-255d-434d-935a-38ba1a53c69a-image-import-ca\") pod \"apiserver-76f77b778f-cbzwp\" (UID: \"e273b49d-255d-434d-935a-38ba1a53c69a\") " pod="openshift-apiserver/apiserver-76f77b778f-cbzwp" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.558018 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3804c897-c904-44f5-b8a3-04ead3e93ac4-audit-dir\") pod \"apiserver-7bbb656c7d-gj9w7\" (UID: \"3804c897-c904-44f5-b8a3-04ead3e93ac4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gj9w7" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.558036 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/c68fa880-a66b-4c4f-9975-c66ac4ae4767-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-4hfxg\" (UID: \"c68fa880-a66b-4c4f-9975-c66ac4ae4767\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4hfxg" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.558052 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hdtm9\" (UniqueName: \"kubernetes.io/projected/959a8eb5-7d1a-4e10-bfc4-c23a1223d38d-kube-api-access-hdtm9\") pod \"console-f9d7485db-vck6k\" (UID: \"959a8eb5-7d1a-4e10-bfc4-c23a1223d38d\") " pod="openshift-console/console-f9d7485db-vck6k" Dec 08 
21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.558069 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nfxc4\" (UniqueName: \"kubernetes.io/projected/10e8b274-a7ca-4b48-b5f5-7345a78cd074-kube-api-access-nfxc4\") pod \"machine-api-operator-5694c8668f-7c86k\" (UID: \"10e8b274-a7ca-4b48-b5f5-7345a78cd074\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7c86k" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.558087 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wg95w\" (UniqueName: \"kubernetes.io/projected/116929aa-dcdc-4e62-9a30-40ef84d80f4f-kube-api-access-wg95w\") pod \"cluster-image-registry-operator-dc59b4c8b-d99lw\" (UID: \"116929aa-dcdc-4e62-9a30-40ef84d80f4f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-d99lw" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.558106 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nbs85\" (UniqueName: \"kubernetes.io/projected/c68fa880-a66b-4c4f-9975-c66ac4ae4767-kube-api-access-nbs85\") pod \"cluster-samples-operator-665b6dd947-4hfxg\" (UID: \"c68fa880-a66b-4c4f-9975-c66ac4ae4767\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4hfxg" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.558122 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/116929aa-dcdc-4e62-9a30-40ef84d80f4f-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-d99lw\" (UID: \"116929aa-dcdc-4e62-9a30-40ef84d80f4f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-d99lw" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.558306 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/e273b49d-255d-434d-935a-38ba1a53c69a-node-pullsecrets\") pod \"apiserver-76f77b778f-cbzwp\" (UID: \"e273b49d-255d-434d-935a-38ba1a53c69a\") " pod="openshift-apiserver/apiserver-76f77b778f-cbzwp" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.558349 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e273b49d-255d-434d-935a-38ba1a53c69a-audit-dir\") pod \"apiserver-76f77b778f-cbzwp\" (UID: \"e273b49d-255d-434d-935a-38ba1a53c69a\") " pod="openshift-apiserver/apiserver-76f77b778f-cbzwp" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.558588 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/10e8b274-a7ca-4b48-b5f5-7345a78cd074-config\") pod \"machine-api-operator-5694c8668f-7c86k\" (UID: \"10e8b274-a7ca-4b48-b5f5-7345a78cd074\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7c86k" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.558670 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/116929aa-dcdc-4e62-9a30-40ef84d80f4f-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-d99lw\" (UID: \"116929aa-dcdc-4e62-9a30-40ef84d80f4f\") " 
pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-d99lw" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.558785 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3804c897-c904-44f5-b8a3-04ead3e93ac4-serving-cert\") pod \"apiserver-7bbb656c7d-gj9w7\" (UID: \"3804c897-c904-44f5-b8a3-04ead3e93ac4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gj9w7" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.558823 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/959a8eb5-7d1a-4e10-bfc4-c23a1223d38d-console-config\") pod \"console-f9d7485db-vck6k\" (UID: \"959a8eb5-7d1a-4e10-bfc4-c23a1223d38d\") " pod="openshift-console/console-f9d7485db-vck6k" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.558852 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c705ea37-240a-4fd8-9779-98bff52678ca-config\") pod \"route-controller-manager-6576b87f9c-p7gsq\" (UID: \"c705ea37-240a-4fd8-9779-98bff52678ca\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7gsq" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.558900 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/959a8eb5-7d1a-4e10-bfc4-c23a1223d38d-oauth-serving-cert\") pod \"console-f9d7485db-vck6k\" (UID: \"959a8eb5-7d1a-4e10-bfc4-c23a1223d38d\") " pod="openshift-console/console-f9d7485db-vck6k" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.558946 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d5c2d385-390f-4d6f-8243-6399bd793167-serving-cert\") pod \"console-operator-58897d9998-dmbnh\" (UID: \"d5c2d385-390f-4d6f-8243-6399bd793167\") " pod="openshift-console-operator/console-operator-58897d9998-dmbnh" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.558998 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7sdxh\" (UniqueName: \"kubernetes.io/projected/8c0d7a26-0742-4593-9192-c667b71c30fb-kube-api-access-7sdxh\") pod \"openshift-config-operator-7777fb866f-d259k\" (UID: \"8c0d7a26-0742-4593-9192-c667b71c30fb\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-d259k" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.559050 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8bd7cdf9-9085-4702-8a95-f3f445783066-config\") pod \"controller-manager-879f6c89f-kn6mg\" (UID: \"8bd7cdf9-9085-4702-8a95-f3f445783066\") " pod="openshift-controller-manager/controller-manager-879f6c89f-kn6mg" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.559111 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xx9r5\" (UniqueName: \"kubernetes.io/projected/8bd7cdf9-9085-4702-8a95-f3f445783066-kube-api-access-xx9r5\") pod \"controller-manager-879f6c89f-kn6mg\" (UID: \"8bd7cdf9-9085-4702-8a95-f3f445783066\") " pod="openshift-controller-manager/controller-manager-879f6c89f-kn6mg" Dec 08 21:19:18 crc 
kubenswrapper[4791]: I1208 21:19:18.559139 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/10e8b274-a7ca-4b48-b5f5-7345a78cd074-images\") pod \"machine-api-operator-5694c8668f-7c86k\" (UID: \"10e8b274-a7ca-4b48-b5f5-7345a78cd074\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7c86k" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.559189 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c705ea37-240a-4fd8-9779-98bff52678ca-client-ca\") pod \"route-controller-manager-6576b87f9c-p7gsq\" (UID: \"c705ea37-240a-4fd8-9779-98bff52678ca\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7gsq" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.559230 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/116929aa-dcdc-4e62-9a30-40ef84d80f4f-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-d99lw\" (UID: \"116929aa-dcdc-4e62-9a30-40ef84d80f4f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-d99lw" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.559274 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/3804c897-c904-44f5-b8a3-04ead3e93ac4-encryption-config\") pod \"apiserver-7bbb656c7d-gj9w7\" (UID: \"3804c897-c904-44f5-b8a3-04ead3e93ac4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gj9w7" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.559311 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dfnkh\" (UniqueName: \"kubernetes.io/projected/c705ea37-240a-4fd8-9779-98bff52678ca-kube-api-access-dfnkh\") pod \"route-controller-manager-6576b87f9c-p7gsq\" (UID: \"c705ea37-240a-4fd8-9779-98bff52678ca\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7gsq" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.559380 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3804c897-c904-44f5-b8a3-04ead3e93ac4-etcd-client\") pod \"apiserver-7bbb656c7d-gj9w7\" (UID: \"3804c897-c904-44f5-b8a3-04ead3e93ac4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gj9w7" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.559460 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p2b8h\" (UniqueName: \"kubernetes.io/projected/d5c2d385-390f-4d6f-8243-6399bd793167-kube-api-access-p2b8h\") pod \"console-operator-58897d9998-dmbnh\" (UID: \"d5c2d385-390f-4d6f-8243-6399bd793167\") " pod="openshift-console-operator/console-operator-58897d9998-dmbnh" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.563815 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.564008 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.564204 4791 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.564354 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.564521 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.564687 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.564862 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.564994 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.568625 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-kn6mg"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.568791 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.568956 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.569082 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.569132 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.569221 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.569407 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.569419 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.569091 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.569563 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.569653 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.569852 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.577009 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.577236 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 
08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.577376 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.581424 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.582012 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.582178 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.582298 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.582443 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.582721 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.582936 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.583053 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.583254 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.583478 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.583722 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.583921 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.584050 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.584163 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.584756 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-sl8dd"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.585212 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.585406 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sl8dd" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.585634 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.585645 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.585854 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.586118 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.586289 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.587798 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-p7dcz"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.588635 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-zll8d"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.589095 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.590014 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-p7dcz" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.591446 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.591601 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.591755 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.591938 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.604771 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.610053 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-w9zwt"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.610626 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-w9zwt" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.610994 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-8hq8w"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.611489 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-8hq8w" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.612556 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-d99lw"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.626066 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.627457 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-lk2tp"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.627879 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.654260 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.654327 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.654555 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.655094 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.655568 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.655723 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.655825 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.655959 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.656073 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.656187 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.656310 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.656701 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.656893 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.657016 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 
21:19:18.657121 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.657229 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.657338 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.683864 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-rdmr7"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.684061 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.684737 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.685464 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-pm6zw"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.686341 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.686513 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.688601 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/10e8b274-a7ca-4b48-b5f5-7345a78cd074-config\") pod \"machine-api-operator-5694c8668f-7c86k\" (UID: \"10e8b274-a7ca-4b48-b5f5-7345a78cd074\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7c86k" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.688657 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/116929aa-dcdc-4e62-9a30-40ef84d80f4f-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-d99lw\" (UID: \"116929aa-dcdc-4e62-9a30-40ef84d80f4f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-d99lw" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.688693 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-zll8d\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.688758 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3804c897-c904-44f5-b8a3-04ead3e93ac4-serving-cert\") pod \"apiserver-7bbb656c7d-gj9w7\" (UID: \"3804c897-c904-44f5-b8a3-04ead3e93ac4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gj9w7" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.688788 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" 
(UniqueName: \"kubernetes.io/projected/974635f6-7864-44fb-81df-ec9f404ea543-bound-sa-token\") pod \"ingress-operator-5b745b69d9-sl8dd\" (UID: \"974635f6-7864-44fb-81df-ec9f404ea543\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sl8dd" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.688810 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/959a8eb5-7d1a-4e10-bfc4-c23a1223d38d-console-config\") pod \"console-f9d7485db-vck6k\" (UID: \"959a8eb5-7d1a-4e10-bfc4-c23a1223d38d\") " pod="openshift-console/console-f9d7485db-vck6k" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.688825 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.688834 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c705ea37-240a-4fd8-9779-98bff52678ca-config\") pod \"route-controller-manager-6576b87f9c-p7gsq\" (UID: \"c705ea37-240a-4fd8-9779-98bff52678ca\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7gsq" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.688861 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-zll8d\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.688889 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/959a8eb5-7d1a-4e10-bfc4-c23a1223d38d-oauth-serving-cert\") pod \"console-f9d7485db-vck6k\" (UID: \"959a8eb5-7d1a-4e10-bfc4-c23a1223d38d\") " pod="openshift-console/console-f9d7485db-vck6k" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.688912 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d5c2d385-390f-4d6f-8243-6399bd793167-serving-cert\") pod \"console-operator-58897d9998-dmbnh\" (UID: \"d5c2d385-390f-4d6f-8243-6399bd793167\") " pod="openshift-console-operator/console-operator-58897d9998-dmbnh" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.688938 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7sdxh\" (UniqueName: \"kubernetes.io/projected/8c0d7a26-0742-4593-9192-c667b71c30fb-kube-api-access-7sdxh\") pod \"openshift-config-operator-7777fb866f-d259k\" (UID: \"8c0d7a26-0742-4593-9192-c667b71c30fb\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-d259k" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.688959 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/974635f6-7864-44fb-81df-ec9f404ea543-metrics-tls\") pod \"ingress-operator-5b745b69d9-sl8dd\" (UID: \"974635f6-7864-44fb-81df-ec9f404ea543\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sl8dd" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.688967 4791 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-ingress-operator"/"metrics-tls" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.689018 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8bd7cdf9-9085-4702-8a95-f3f445783066-config\") pod \"controller-manager-879f6c89f-kn6mg\" (UID: \"8bd7cdf9-9085-4702-8a95-f3f445783066\") " pod="openshift-controller-manager/controller-manager-879f6c89f-kn6mg" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.689049 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xx9r5\" (UniqueName: \"kubernetes.io/projected/8bd7cdf9-9085-4702-8a95-f3f445783066-kube-api-access-xx9r5\") pod \"controller-manager-879f6c89f-kn6mg\" (UID: \"8bd7cdf9-9085-4702-8a95-f3f445783066\") " pod="openshift-controller-manager/controller-manager-879f6c89f-kn6mg" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.689069 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/10e8b274-a7ca-4b48-b5f5-7345a78cd074-images\") pod \"machine-api-operator-5694c8668f-7c86k\" (UID: \"10e8b274-a7ca-4b48-b5f5-7345a78cd074\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7c86k" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.689076 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.689091 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c705ea37-240a-4fd8-9779-98bff52678ca-client-ca\") pod \"route-controller-manager-6576b87f9c-p7gsq\" (UID: \"c705ea37-240a-4fd8-9779-98bff52678ca\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7gsq" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.689118 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/116929aa-dcdc-4e62-9a30-40ef84d80f4f-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-d99lw\" (UID: \"116929aa-dcdc-4e62-9a30-40ef84d80f4f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-d99lw" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.689157 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-zll8d\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.689179 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/3804c897-c904-44f5-b8a3-04ead3e93ac4-encryption-config\") pod \"apiserver-7bbb656c7d-gj9w7\" (UID: \"3804c897-c904-44f5-b8a3-04ead3e93ac4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gj9w7" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.689209 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dfnkh\" (UniqueName: \"kubernetes.io/projected/c705ea37-240a-4fd8-9779-98bff52678ca-kube-api-access-dfnkh\") pod \"route-controller-manager-6576b87f9c-p7gsq\" (UID: 
\"c705ea37-240a-4fd8-9779-98bff52678ca\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7gsq" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.689234 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3804c897-c904-44f5-b8a3-04ead3e93ac4-etcd-client\") pod \"apiserver-7bbb656c7d-gj9w7\" (UID: \"3804c897-c904-44f5-b8a3-04ead3e93ac4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gj9w7" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.689242 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.689259 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-zll8d\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.689286 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-swbcq\" (UniqueName: \"kubernetes.io/projected/974635f6-7864-44fb-81df-ec9f404ea543-kube-api-access-swbcq\") pod \"ingress-operator-5b745b69d9-sl8dd\" (UID: \"974635f6-7864-44fb-81df-ec9f404ea543\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sl8dd" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.689326 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p2b8h\" (UniqueName: \"kubernetes.io/projected/d5c2d385-390f-4d6f-8243-6399bd793167-kube-api-access-p2b8h\") pod \"console-operator-58897d9998-dmbnh\" (UID: \"d5c2d385-390f-4d6f-8243-6399bd793167\") " pod="openshift-console-operator/console-operator-58897d9998-dmbnh" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.689350 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/3804c897-c904-44f5-b8a3-04ead3e93ac4-audit-policies\") pod \"apiserver-7bbb656c7d-gj9w7\" (UID: \"3804c897-c904-44f5-b8a3-04ead3e93ac4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gj9w7" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.689371 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-zll8d\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.689395 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/3804c897-c904-44f5-b8a3-04ead3e93ac4-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-gj9w7\" (UID: \"3804c897-c904-44f5-b8a3-04ead3e93ac4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gj9w7" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.689437 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: 
\"kubernetes.io/secret/959a8eb5-7d1a-4e10-bfc4-c23a1223d38d-console-oauth-config\") pod \"console-f9d7485db-vck6k\" (UID: \"959a8eb5-7d1a-4e10-bfc4-c23a1223d38d\") " pod="openshift-console/console-f9d7485db-vck6k" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.689469 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-zll8d\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.689516 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/e273b49d-255d-434d-935a-38ba1a53c69a-etcd-serving-ca\") pod \"apiserver-76f77b778f-cbzwp\" (UID: \"e273b49d-255d-434d-935a-38ba1a53c69a\") " pod="openshift-apiserver/apiserver-76f77b778f-cbzwp" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.689538 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e273b49d-255d-434d-935a-38ba1a53c69a-trusted-ca-bundle\") pod \"apiserver-76f77b778f-cbzwp\" (UID: \"e273b49d-255d-434d-935a-38ba1a53c69a\") " pod="openshift-apiserver/apiserver-76f77b778f-cbzwp" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.689567 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8bd7cdf9-9085-4702-8a95-f3f445783066-client-ca\") pod \"controller-manager-879f6c89f-kn6mg\" (UID: \"8bd7cdf9-9085-4702-8a95-f3f445783066\") " pod="openshift-controller-manager/controller-manager-879f6c89f-kn6mg" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.689590 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/959a8eb5-7d1a-4e10-bfc4-c23a1223d38d-service-ca\") pod \"console-f9d7485db-vck6k\" (UID: \"959a8eb5-7d1a-4e10-bfc4-c23a1223d38d\") " pod="openshift-console/console-f9d7485db-vck6k" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.689642 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8c0d7a26-0742-4593-9192-c667b71c30fb-serving-cert\") pod \"openshift-config-operator-7777fb866f-d259k\" (UID: \"8c0d7a26-0742-4593-9192-c667b71c30fb\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-d259k" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.689682 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/36861fd8-1693-4ac3-b70d-571c891ddca3-machine-approver-tls\") pod \"machine-approver-56656f9798-qf5jp\" (UID: \"36861fd8-1693-4ac3-b70d-571c891ddca3\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qf5jp" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.689723 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7j74c\" (UniqueName: \"kubernetes.io/projected/36861fd8-1693-4ac3-b70d-571c891ddca3-kube-api-access-7j74c\") pod \"machine-approver-56656f9798-qf5jp\" (UID: \"36861fd8-1693-4ac3-b70d-571c891ddca3\") " 
pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qf5jp" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.689758 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-audit-dir\") pod \"oauth-openshift-558db77b4-zll8d\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.689784 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-audit-policies\") pod \"oauth-openshift-558db77b4-zll8d\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.689805 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.689808 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e273b49d-255d-434d-935a-38ba1a53c69a-serving-cert\") pod \"apiserver-76f77b778f-cbzwp\" (UID: \"e273b49d-255d-434d-935a-38ba1a53c69a\") " pod="openshift-apiserver/apiserver-76f77b778f-cbzwp" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.689834 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8bd7cdf9-9085-4702-8a95-f3f445783066-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-kn6mg\" (UID: \"8bd7cdf9-9085-4702-8a95-f3f445783066\") " pod="openshift-controller-manager/controller-manager-879f6c89f-kn6mg" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.689862 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/974635f6-7864-44fb-81df-ec9f404ea543-trusted-ca\") pod \"ingress-operator-5b745b69d9-sl8dd\" (UID: \"974635f6-7864-44fb-81df-ec9f404ea543\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sl8dd" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.689892 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/959a8eb5-7d1a-4e10-bfc4-c23a1223d38d-console-serving-cert\") pod \"console-f9d7485db-vck6k\" (UID: \"959a8eb5-7d1a-4e10-bfc4-c23a1223d38d\") " pod="openshift-console/console-f9d7485db-vck6k" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.689914 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c705ea37-240a-4fd8-9779-98bff52678ca-serving-cert\") pod \"route-controller-manager-6576b87f9c-p7gsq\" (UID: \"c705ea37-240a-4fd8-9779-98bff52678ca\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7gsq" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.689935 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/36861fd8-1693-4ac3-b70d-571c891ddca3-auth-proxy-config\") pod \"machine-approver-56656f9798-qf5jp\" (UID: \"36861fd8-1693-4ac3-b70d-571c891ddca3\") " 
pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qf5jp" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.689957 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/8c0d7a26-0742-4593-9192-c667b71c30fb-available-featuregates\") pod \"openshift-config-operator-7777fb866f-d259k\" (UID: \"8c0d7a26-0742-4593-9192-c667b71c30fb\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-d259k" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.689992 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e273b49d-255d-434d-935a-38ba1a53c69a-config\") pod \"apiserver-76f77b778f-cbzwp\" (UID: \"e273b49d-255d-434d-935a-38ba1a53c69a\") " pod="openshift-apiserver/apiserver-76f77b778f-cbzwp" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.690011 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e273b49d-255d-434d-935a-38ba1a53c69a-etcd-client\") pod \"apiserver-76f77b778f-cbzwp\" (UID: \"e273b49d-255d-434d-935a-38ba1a53c69a\") " pod="openshift-apiserver/apiserver-76f77b778f-cbzwp" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.690032 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d5c2d385-390f-4d6f-8243-6399bd793167-config\") pod \"console-operator-58897d9998-dmbnh\" (UID: \"d5c2d385-390f-4d6f-8243-6399bd793167\") " pod="openshift-console-operator/console-operator-58897d9998-dmbnh" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.690052 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d5c2d385-390f-4d6f-8243-6399bd793167-trusted-ca\") pod \"console-operator-58897d9998-dmbnh\" (UID: \"d5c2d385-390f-4d6f-8243-6399bd793167\") " pod="openshift-console-operator/console-operator-58897d9998-dmbnh" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.690073 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/10e8b274-a7ca-4b48-b5f5-7345a78cd074-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-7c86k\" (UID: \"10e8b274-a7ca-4b48-b5f5-7345a78cd074\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7c86k" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.690094 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-zll8d\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.690122 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lnrth\" (UniqueName: \"kubernetes.io/projected/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-kube-api-access-lnrth\") pod \"oauth-openshift-558db77b4-zll8d\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.690147 4791 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/959a8eb5-7d1a-4e10-bfc4-c23a1223d38d-trusted-ca-bundle\") pod \"console-f9d7485db-vck6k\" (UID: \"959a8eb5-7d1a-4e10-bfc4-c23a1223d38d\") " pod="openshift-console/console-f9d7485db-vck6k" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.690219 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sds94\" (UniqueName: \"kubernetes.io/projected/3804c897-c904-44f5-b8a3-04ead3e93ac4-kube-api-access-sds94\") pod \"apiserver-7bbb656c7d-gj9w7\" (UID: \"3804c897-c904-44f5-b8a3-04ead3e93ac4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gj9w7" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.690243 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8bd7cdf9-9085-4702-8a95-f3f445783066-serving-cert\") pod \"controller-manager-879f6c89f-kn6mg\" (UID: \"8bd7cdf9-9085-4702-8a95-f3f445783066\") " pod="openshift-controller-manager/controller-manager-879f6c89f-kn6mg" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.690266 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/e273b49d-255d-434d-935a-38ba1a53c69a-audit\") pod \"apiserver-76f77b778f-cbzwp\" (UID: \"e273b49d-255d-434d-935a-38ba1a53c69a\") " pod="openshift-apiserver/apiserver-76f77b778f-cbzwp" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.690298 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/e273b49d-255d-434d-935a-38ba1a53c69a-encryption-config\") pod \"apiserver-76f77b778f-cbzwp\" (UID: \"e273b49d-255d-434d-935a-38ba1a53c69a\") " pod="openshift-apiserver/apiserver-76f77b778f-cbzwp" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.690321 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzrxc\" (UniqueName: \"kubernetes.io/projected/e273b49d-255d-434d-935a-38ba1a53c69a-kube-api-access-pzrxc\") pod \"apiserver-76f77b778f-cbzwp\" (UID: \"e273b49d-255d-434d-935a-38ba1a53c69a\") " pod="openshift-apiserver/apiserver-76f77b778f-cbzwp" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.690349 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3804c897-c904-44f5-b8a3-04ead3e93ac4-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-gj9w7\" (UID: \"3804c897-c904-44f5-b8a3-04ead3e93ac4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gj9w7" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.690386 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/3804c897-c904-44f5-b8a3-04ead3e93ac4-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-gj9w7\" (UID: \"3804c897-c904-44f5-b8a3-04ead3e93ac4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gj9w7" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.690439 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/36861fd8-1693-4ac3-b70d-571c891ddca3-config\") pod \"machine-approver-56656f9798-qf5jp\" (UID: \"36861fd8-1693-4ac3-b70d-571c891ddca3\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qf5jp" Dec 08 21:19:18 crc 
kubenswrapper[4791]: I1208 21:19:18.690569 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/e273b49d-255d-434d-935a-38ba1a53c69a-image-import-ca\") pod \"apiserver-76f77b778f-cbzwp\" (UID: \"e273b49d-255d-434d-935a-38ba1a53c69a\") " pod="openshift-apiserver/apiserver-76f77b778f-cbzwp" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.690614 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3804c897-c904-44f5-b8a3-04ead3e93ac4-audit-dir\") pod \"apiserver-7bbb656c7d-gj9w7\" (UID: \"3804c897-c904-44f5-b8a3-04ead3e93ac4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gj9w7" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.690669 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/c68fa880-a66b-4c4f-9975-c66ac4ae4767-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-4hfxg\" (UID: \"c68fa880-a66b-4c4f-9975-c66ac4ae4767\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4hfxg" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.690869 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3804c897-c904-44f5-b8a3-04ead3e93ac4-audit-dir\") pod \"apiserver-7bbb656c7d-gj9w7\" (UID: \"3804c897-c904-44f5-b8a3-04ead3e93ac4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gj9w7" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.690934 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3804c897-c904-44f5-b8a3-04ead3e93ac4-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-gj9w7\" (UID: \"3804c897-c904-44f5-b8a3-04ead3e93ac4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gj9w7" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.691250 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4f4sg"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.691295 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/10e8b274-a7ca-4b48-b5f5-7345a78cd074-config\") pod \"machine-api-operator-5694c8668f-7c86k\" (UID: \"10e8b274-a7ca-4b48-b5f5-7345a78cd074\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7c86k" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.691485 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/36861fd8-1693-4ac3-b70d-571c891ddca3-config\") pod \"machine-approver-56656f9798-qf5jp\" (UID: \"36861fd8-1693-4ac3-b70d-571c891ddca3\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qf5jp" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.691862 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4f4sg" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.691869 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/3804c897-c904-44f5-b8a3-04ead3e93ac4-audit-policies\") pod \"apiserver-7bbb656c7d-gj9w7\" (UID: \"3804c897-c904-44f5-b8a3-04ead3e93ac4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gj9w7" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.692490 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hdtm9\" (UniqueName: \"kubernetes.io/projected/959a8eb5-7d1a-4e10-bfc4-c23a1223d38d-kube-api-access-hdtm9\") pod \"console-f9d7485db-vck6k\" (UID: \"959a8eb5-7d1a-4e10-bfc4-c23a1223d38d\") " pod="openshift-console/console-f9d7485db-vck6k" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.692553 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nfxc4\" (UniqueName: \"kubernetes.io/projected/10e8b274-a7ca-4b48-b5f5-7345a78cd074-kube-api-access-nfxc4\") pod \"machine-api-operator-5694c8668f-7c86k\" (UID: \"10e8b274-a7ca-4b48-b5f5-7345a78cd074\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7c86k" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.692647 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/36861fd8-1693-4ac3-b70d-571c891ddca3-auth-proxy-config\") pod \"machine-approver-56656f9798-qf5jp\" (UID: \"36861fd8-1693-4ac3-b70d-571c891ddca3\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qf5jp" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.690237 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.693880 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.691207 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.691281 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.710650 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rdmr7" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.710987 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pm6zw" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.711365 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/959a8eb5-7d1a-4e10-bfc4-c23a1223d38d-console-config\") pod \"console-f9d7485db-vck6k\" (UID: \"959a8eb5-7d1a-4e10-bfc4-c23a1223d38d\") " pod="openshift-console/console-f9d7485db-vck6k" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.711800 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/e273b49d-255d-434d-935a-38ba1a53c69a-image-import-ca\") pod \"apiserver-76f77b778f-cbzwp\" (UID: \"e273b49d-255d-434d-935a-38ba1a53c69a\") " pod="openshift-apiserver/apiserver-76f77b778f-cbzwp" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.712449 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/959a8eb5-7d1a-4e10-bfc4-c23a1223d38d-oauth-serving-cert\") pod \"console-f9d7485db-vck6k\" (UID: \"959a8eb5-7d1a-4e10-bfc4-c23a1223d38d\") " pod="openshift-console/console-f9d7485db-vck6k" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.712601 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wg95w\" (UniqueName: \"kubernetes.io/projected/116929aa-dcdc-4e62-9a30-40ef84d80f4f-kube-api-access-wg95w\") pod \"cluster-image-registry-operator-dc59b4c8b-d99lw\" (UID: \"116929aa-dcdc-4e62-9a30-40ef84d80f4f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-d99lw" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.712798 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-5tc82"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.713344 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-hs66g"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.713660 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-wl6d7"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.714880 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3804c897-c904-44f5-b8a3-04ead3e93ac4-serving-cert\") pod \"apiserver-7bbb656c7d-gj9w7\" (UID: \"3804c897-c904-44f5-b8a3-04ead3e93ac4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gj9w7" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.715223 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/959a8eb5-7d1a-4e10-bfc4-c23a1223d38d-console-oauth-config\") pod \"console-f9d7485db-vck6k\" (UID: \"959a8eb5-7d1a-4e10-bfc4-c23a1223d38d\") " pod="openshift-console/console-f9d7485db-vck6k" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.716359 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8bd7cdf9-9085-4702-8a95-f3f445783066-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-kn6mg\" (UID: \"8bd7cdf9-9085-4702-8a95-f3f445783066\") " pod="openshift-controller-manager/controller-manager-879f6c89f-kn6mg" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.717075 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"images\" (UniqueName: \"kubernetes.io/configmap/10e8b274-a7ca-4b48-b5f5-7345a78cd074-images\") pod \"machine-api-operator-5694c8668f-7c86k\" (UID: \"10e8b274-a7ca-4b48-b5f5-7345a78cd074\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7c86k" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.717147 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nbs85\" (UniqueName: \"kubernetes.io/projected/c68fa880-a66b-4c4f-9975-c66ac4ae4767-kube-api-access-nbs85\") pod \"cluster-samples-operator-665b6dd947-4hfxg\" (UID: \"c68fa880-a66b-4c4f-9975-c66ac4ae4767\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4hfxg" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.717174 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/116929aa-dcdc-4e62-9a30-40ef84d80f4f-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-d99lw\" (UID: \"116929aa-dcdc-4e62-9a30-40ef84d80f4f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-d99lw" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.717202 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-zll8d\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.717237 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-zll8d\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.717263 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/e273b49d-255d-434d-935a-38ba1a53c69a-node-pullsecrets\") pod \"apiserver-76f77b778f-cbzwp\" (UID: \"e273b49d-255d-434d-935a-38ba1a53c69a\") " pod="openshift-apiserver/apiserver-76f77b778f-cbzwp" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.717282 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e273b49d-255d-434d-935a-38ba1a53c69a-audit-dir\") pod \"apiserver-76f77b778f-cbzwp\" (UID: \"e273b49d-255d-434d-935a-38ba1a53c69a\") " pod="openshift-apiserver/apiserver-76f77b778f-cbzwp" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.717300 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-zll8d\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.717322 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-zll8d\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.717548 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8c0d7a26-0742-4593-9192-c667b71c30fb-serving-cert\") pod \"openshift-config-operator-7777fb866f-d259k\" (UID: \"8c0d7a26-0742-4593-9192-c667b71c30fb\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-d259k" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.718088 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/8c0d7a26-0742-4593-9192-c667b71c30fb-available-featuregates\") pod \"openshift-config-operator-7777fb866f-d259k\" (UID: \"8c0d7a26-0742-4593-9192-c667b71c30fb\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-d259k" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.718933 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e273b49d-255d-434d-935a-38ba1a53c69a-config\") pod \"apiserver-76f77b778f-cbzwp\" (UID: \"e273b49d-255d-434d-935a-38ba1a53c69a\") " pod="openshift-apiserver/apiserver-76f77b778f-cbzwp" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.719232 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-5tc82" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.719384 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-wl6d7" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.719691 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-hs66g" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.720214 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/36861fd8-1693-4ac3-b70d-571c891ddca3-machine-approver-tls\") pod \"machine-approver-56656f9798-qf5jp\" (UID: \"36861fd8-1693-4ac3-b70d-571c891ddca3\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qf5jp" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.720920 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/e273b49d-255d-434d-935a-38ba1a53c69a-audit\") pod \"apiserver-76f77b778f-cbzwp\" (UID: \"e273b49d-255d-434d-935a-38ba1a53c69a\") " pod="openshift-apiserver/apiserver-76f77b778f-cbzwp" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.721022 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/e273b49d-255d-434d-935a-38ba1a53c69a-node-pullsecrets\") pod \"apiserver-76f77b778f-cbzwp\" (UID: \"e273b49d-255d-434d-935a-38ba1a53c69a\") " pod="openshift-apiserver/apiserver-76f77b778f-cbzwp" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.721419 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c705ea37-240a-4fd8-9779-98bff52678ca-config\") pod \"route-controller-manager-6576b87f9c-p7gsq\" (UID: \"c705ea37-240a-4fd8-9779-98bff52678ca\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7gsq" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.721537 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e273b49d-255d-434d-935a-38ba1a53c69a-etcd-client\") pod \"apiserver-76f77b778f-cbzwp\" (UID: \"e273b49d-255d-434d-935a-38ba1a53c69a\") " pod="openshift-apiserver/apiserver-76f77b778f-cbzwp" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.722291 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8bd7cdf9-9085-4702-8a95-f3f445783066-config\") pod \"controller-manager-879f6c89f-kn6mg\" (UID: \"8bd7cdf9-9085-4702-8a95-f3f445783066\") " pod="openshift-controller-manager/controller-manager-879f6c89f-kn6mg" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.722395 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e273b49d-255d-434d-935a-38ba1a53c69a-audit-dir\") pod \"apiserver-76f77b778f-cbzwp\" (UID: \"e273b49d-255d-434d-935a-38ba1a53c69a\") " pod="openshift-apiserver/apiserver-76f77b778f-cbzwp" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.723218 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/959a8eb5-7d1a-4e10-bfc4-c23a1223d38d-service-ca\") pod \"console-f9d7485db-vck6k\" (UID: \"959a8eb5-7d1a-4e10-bfc4-c23a1223d38d\") " pod="openshift-console/console-f9d7485db-vck6k" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.723419 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c705ea37-240a-4fd8-9779-98bff52678ca-client-ca\") pod \"route-controller-manager-6576b87f9c-p7gsq\" (UID: \"c705ea37-240a-4fd8-9779-98bff52678ca\") " 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7gsq" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.724085 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8bd7cdf9-9085-4702-8a95-f3f445783066-client-ca\") pod \"controller-manager-879f6c89f-kn6mg\" (UID: \"8bd7cdf9-9085-4702-8a95-f3f445783066\") " pod="openshift-controller-manager/controller-manager-879f6c89f-kn6mg" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.724889 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/116929aa-dcdc-4e62-9a30-40ef84d80f4f-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-d99lw\" (UID: \"116929aa-dcdc-4e62-9a30-40ef84d80f4f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-d99lw" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.725139 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/e273b49d-255d-434d-935a-38ba1a53c69a-etcd-serving-ca\") pod \"apiserver-76f77b778f-cbzwp\" (UID: \"e273b49d-255d-434d-935a-38ba1a53c69a\") " pod="openshift-apiserver/apiserver-76f77b778f-cbzwp" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.726511 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d5c2d385-390f-4d6f-8243-6399bd793167-serving-cert\") pod \"console-operator-58897d9998-dmbnh\" (UID: \"d5c2d385-390f-4d6f-8243-6399bd793167\") " pod="openshift-console-operator/console-operator-58897d9998-dmbnh" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.726750 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/3804c897-c904-44f5-b8a3-04ead3e93ac4-encryption-config\") pod \"apiserver-7bbb656c7d-gj9w7\" (UID: \"3804c897-c904-44f5-b8a3-04ead3e93ac4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gj9w7" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.727264 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d5c2d385-390f-4d6f-8243-6399bd793167-config\") pod \"console-operator-58897d9998-dmbnh\" (UID: \"d5c2d385-390f-4d6f-8243-6399bd793167\") " pod="openshift-console-operator/console-operator-58897d9998-dmbnh" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.727520 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8bd7cdf9-9085-4702-8a95-f3f445783066-serving-cert\") pod \"controller-manager-879f6c89f-kn6mg\" (UID: \"8bd7cdf9-9085-4702-8a95-f3f445783066\") " pod="openshift-controller-manager/controller-manager-879f6c89f-kn6mg" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.727752 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.734278 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.735612 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.735755 4791 reflector.go:368] Caches populated for 
*v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.735948 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e273b49d-255d-434d-935a-38ba1a53c69a-trusted-ca-bundle\") pod \"apiserver-76f77b778f-cbzwp\" (UID: \"e273b49d-255d-434d-935a-38ba1a53c69a\") " pod="openshift-apiserver/apiserver-76f77b778f-cbzwp" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.738902 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/959a8eb5-7d1a-4e10-bfc4-c23a1223d38d-console-serving-cert\") pod \"console-f9d7485db-vck6k\" (UID: \"959a8eb5-7d1a-4e10-bfc4-c23a1223d38d\") " pod="openshift-console/console-f9d7485db-vck6k" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.739666 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/c68fa880-a66b-4c4f-9975-c66ac4ae4767-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-4hfxg\" (UID: \"c68fa880-a66b-4c4f-9975-c66ac4ae4767\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4hfxg" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.743508 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.744612 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.746283 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e273b49d-255d-434d-935a-38ba1a53c69a-serving-cert\") pod \"apiserver-76f77b778f-cbzwp\" (UID: \"e273b49d-255d-434d-935a-38ba1a53c69a\") " pod="openshift-apiserver/apiserver-76f77b778f-cbzwp" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.748323 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3804c897-c904-44f5-b8a3-04ead3e93ac4-etcd-client\") pod \"apiserver-7bbb656c7d-gj9w7\" (UID: \"3804c897-c904-44f5-b8a3-04ead3e93ac4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gj9w7" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.749742 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.750414 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/116929aa-dcdc-4e62-9a30-40ef84d80f4f-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-d99lw\" (UID: \"116929aa-dcdc-4e62-9a30-40ef84d80f4f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-d99lw" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.750761 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.751502 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c705ea37-240a-4fd8-9779-98bff52678ca-serving-cert\") pod \"route-controller-manager-6576b87f9c-p7gsq\" (UID: 
\"c705ea37-240a-4fd8-9779-98bff52678ca\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7gsq" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.752295 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d5c2d385-390f-4d6f-8243-6399bd793167-trusted-ca\") pod \"console-operator-58897d9998-dmbnh\" (UID: \"d5c2d385-390f-4d6f-8243-6399bd793167\") " pod="openshift-console-operator/console-operator-58897d9998-dmbnh" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.753135 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zp8m4"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.753428 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.753835 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zp8m4" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.754184 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7dk77"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.770951 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.772086 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/959a8eb5-7d1a-4e10-bfc4-c23a1223d38d-trusted-ca-bundle\") pod \"console-f9d7485db-vck6k\" (UID: \"959a8eb5-7d1a-4e10-bfc4-c23a1223d38d\") " pod="openshift-console/console-f9d7485db-vck6k" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.772741 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.773076 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7dk77" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.775191 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gnc4f"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.776904 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-q8wd6"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.777390 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gnc4f" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.778414 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.778526 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-sh5nr"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.778760 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-q8wd6" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.779662 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-sh5nr" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.787606 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.788318 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7gsq"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.790213 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-gj9w7"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.794374 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4hfxg"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.795548 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-7c86k"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.796598 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-cbzwp"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.797554 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rvvfm"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.798549 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rvvfm" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.798918 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-p7dcz"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.802250 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gt8l7"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.803060 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gt8l7" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.803274 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420475-jq6vt"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.804207 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-vck6k"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.804219 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420475-jq6vt" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.805215 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-jxmvh"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.805875 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-jxmvh" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.806112 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7rbn4"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.806574 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7rbn4" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.807033 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-d6h2c"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.807539 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-d6h2c" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.808037 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ktmqq"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.808784 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ktmqq" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.810045 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/cni-sysctl-allowlist-ds-2kt8c"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.810521 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/cni-sysctl-allowlist-ds-2kt8c" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.811168 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-7f84n"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.811840 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-7f84n" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.813149 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-68nlk"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.813649 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-68nlk" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.814312 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-d259k"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.815255 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-rdpmz"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.815937 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-rdpmz" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.816429 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-8hq8w"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.817591 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-6fncw"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.818380 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-6fncw" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.818671 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-w9zwt"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.818918 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/974635f6-7864-44fb-81df-ec9f404ea543-bound-sa-token\") pod \"ingress-operator-5b745b69d9-sl8dd\" (UID: \"974635f6-7864-44fb-81df-ec9f404ea543\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sl8dd" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.818963 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-zll8d\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.819007 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/974635f6-7864-44fb-81df-ec9f404ea543-metrics-tls\") pod \"ingress-operator-5b745b69d9-sl8dd\" (UID: \"974635f6-7864-44fb-81df-ec9f404ea543\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sl8dd" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.819051 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-zll8d\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.819089 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-zll8d\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.819114 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-swbcq\" (UniqueName: \"kubernetes.io/projected/974635f6-7864-44fb-81df-ec9f404ea543-kube-api-access-swbcq\") pod \"ingress-operator-5b745b69d9-sl8dd\" (UID: \"974635f6-7864-44fb-81df-ec9f404ea543\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sl8dd" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.819160 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-zll8d\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.819186 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-zll8d\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.819229 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-audit-dir\") pod \"oauth-openshift-558db77b4-zll8d\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.819254 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-audit-policies\") pod \"oauth-openshift-558db77b4-zll8d\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.819275 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/974635f6-7864-44fb-81df-ec9f404ea543-trusted-ca\") pod \"ingress-operator-5b745b69d9-sl8dd\" (UID: \"974635f6-7864-44fb-81df-ec9f404ea543\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sl8dd" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.819330 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-zll8d\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.819354 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lnrth\" (UniqueName: \"kubernetes.io/projected/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-kube-api-access-lnrth\") pod \"oauth-openshift-558db77b4-zll8d\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.819458 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-zll8d\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.819485 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-zll8d\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.819516 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-zll8d\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.819546 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-zll8d\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.819596 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-zll8d\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.819795 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-audit-dir\") pod \"oauth-openshift-558db77b4-zll8d\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.822462 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-zll8d\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.822659 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-rdmr7"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.822957 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-zll8d\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.823091 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-pm6zw"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.824608 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-system-service-ca\") pod 
\"oauth-openshift-558db77b4-zll8d\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.824992 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-audit-policies\") pod \"oauth-openshift-558db77b4-zll8d\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.826053 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-sl8dd"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.827195 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/974635f6-7864-44fb-81df-ec9f404ea543-trusted-ca\") pod \"ingress-operator-5b745b69d9-sl8dd\" (UID: \"974635f6-7864-44fb-81df-ec9f404ea543\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sl8dd" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.828015 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-wl6d7"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.829304 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-dmbnh"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.830699 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.831794 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7dk77"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.832717 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-zll8d"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.833837 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-q8wd6"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.834815 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4f4sg"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.835823 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zp8m4"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.836827 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-xpmb4"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.837894 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gt8l7"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.837907 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-xpmb4" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.839345 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-sh5nr"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.841206 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/e273b49d-255d-434d-935a-38ba1a53c69a-encryption-config\") pod \"apiserver-76f77b778f-cbzwp\" (UID: \"e273b49d-255d-434d-935a-38ba1a53c69a\") " pod="openshift-apiserver/apiserver-76f77b778f-cbzwp" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.841398 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-jxmvh"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.843120 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7sdxh\" (UniqueName: \"kubernetes.io/projected/8c0d7a26-0742-4593-9192-c667b71c30fb-kube-api-access-7sdxh\") pod \"openshift-config-operator-7777fb866f-d259k\" (UID: \"8c0d7a26-0742-4593-9192-c667b71c30fb\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-d259k" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.843198 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p2b8h\" (UniqueName: \"kubernetes.io/projected/d5c2d385-390f-4d6f-8243-6399bd793167-kube-api-access-p2b8h\") pod \"console-operator-58897d9998-dmbnh\" (UID: \"d5c2d385-390f-4d6f-8243-6399bd793167\") " pod="openshift-console-operator/console-operator-58897d9998-dmbnh" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.843172 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-zll8d\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.843309 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-zll8d\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.843623 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-zll8d\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.844261 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-zll8d\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.844334 4791 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/10e8b274-a7ca-4b48-b5f5-7345a78cd074-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-7c86k\" (UID: \"10e8b274-a7ca-4b48-b5f5-7345a78cd074\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7c86k" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.844949 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/974635f6-7864-44fb-81df-ec9f404ea543-metrics-tls\") pod \"ingress-operator-5b745b69d9-sl8dd\" (UID: \"974635f6-7864-44fb-81df-ec9f404ea543\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sl8dd" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.845014 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rvvfm"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.845575 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-zll8d\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.846305 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/116929aa-dcdc-4e62-9a30-40ef84d80f4f-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-d99lw\" (UID: \"116929aa-dcdc-4e62-9a30-40ef84d80f4f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-d99lw" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.847780 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-zll8d\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.848575 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-zll8d\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.848693 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-lk2tp"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.851301 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gnc4f"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.849066 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-zll8d\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.854050 4791 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-5tc82"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.855901 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7rbn4"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.857214 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-7f84n"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.861775 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-rnxcp"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.865274 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-rnxcp" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.870210 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dfnkh\" (UniqueName: \"kubernetes.io/projected/c705ea37-240a-4fd8-9779-98bff52678ca-kube-api-access-dfnkh\") pod \"route-controller-manager-6576b87f9c-p7gsq\" (UID: \"c705ea37-240a-4fd8-9779-98bff52678ca\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7gsq" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.871864 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-6fncw"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.872203 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.876006 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-68nlk"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.878643 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-d6h2c"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.882375 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-rdpmz"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.884262 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-t5m7x"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.885319 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-t5m7x" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.885612 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-xpmb4"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.888755 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-rnxcp"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.891038 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.894103 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ktmqq"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.896294 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420475-jq6vt"] Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.910342 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.932053 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.953529 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.954009 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7gsq" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.971219 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.989366 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-dmbnh" Dec 08 21:19:18 crc kubenswrapper[4791]: I1208 21:19:18.991657 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.012755 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.051097 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wg95w\" (UniqueName: \"kubernetes.io/projected/116929aa-dcdc-4e62-9a30-40ef84d80f4f-kube-api-access-wg95w\") pod \"cluster-image-registry-operator-dc59b4c8b-d99lw\" (UID: \"116929aa-dcdc-4e62-9a30-40ef84d80f4f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-d99lw" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.069904 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sds94\" (UniqueName: \"kubernetes.io/projected/3804c897-c904-44f5-b8a3-04ead3e93ac4-kube-api-access-sds94\") pod \"apiserver-7bbb656c7d-gj9w7\" (UID: \"3804c897-c904-44f5-b8a3-04ead3e93ac4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gj9w7" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.072054 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.091081 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.093366 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-d259k" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.107586 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-d99lw" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.112743 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.135211 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.154621 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.171405 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.176952 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7gsq"] Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.190484 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 08 21:19:19 crc kubenswrapper[4791]: W1208 21:19:19.196357 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc705ea37_240a_4fd8_9779_98bff52678ca.slice/crio-6eb4c55eaf374e6d37946b209214939753e37410a64dddb7119e1e4d5892e963 WatchSource:0}: Error finding container 6eb4c55eaf374e6d37946b209214939753e37410a64dddb7119e1e4d5892e963: Status 404 returned error can't find the container with id 6eb4c55eaf374e6d37946b209214939753e37410a64dddb7119e1e4d5892e963 Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.217095 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.231590 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.233132 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-dmbnh"] Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.238301 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gj9w7" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.250986 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.270276 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.291487 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.311280 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.368586 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7j74c\" (UniqueName: \"kubernetes.io/projected/36861fd8-1693-4ac3-b70d-571c891ddca3-kube-api-access-7j74c\") pod \"machine-approver-56656f9798-qf5jp\" (UID: \"36861fd8-1693-4ac3-b70d-571c891ddca3\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qf5jp" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.372492 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-d259k"] Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.384460 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nfxc4\" (UniqueName: \"kubernetes.io/projected/10e8b274-a7ca-4b48-b5f5-7345a78cd074-kube-api-access-nfxc4\") pod \"machine-api-operator-5694c8668f-7c86k\" (UID: \"10e8b274-a7ca-4b48-b5f5-7345a78cd074\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7c86k" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.385940 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pzrxc\" (UniqueName: \"kubernetes.io/projected/e273b49d-255d-434d-935a-38ba1a53c69a-kube-api-access-pzrxc\") pod \"apiserver-76f77b778f-cbzwp\" (UID: \"e273b49d-255d-434d-935a-38ba1a53c69a\") " pod="openshift-apiserver/apiserver-76f77b778f-cbzwp" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.408933 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-d99lw"] Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.409630 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hdtm9\" (UniqueName: \"kubernetes.io/projected/959a8eb5-7d1a-4e10-bfc4-c23a1223d38d-kube-api-access-hdtm9\") pod \"console-f9d7485db-vck6k\" (UID: \"959a8eb5-7d1a-4e10-bfc4-c23a1223d38d\") " pod="openshift-console/console-f9d7485db-vck6k" Dec 08 21:19:19 crc kubenswrapper[4791]: W1208 21:19:19.424178 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod116929aa_dcdc_4e62_9a30_40ef84d80f4f.slice/crio-2c270aa1de4294dabfe6c22c857f126a72b35dbf797991953bfd906c3ae7a78a WatchSource:0}: Error finding container 2c270aa1de4294dabfe6c22c857f126a72b35dbf797991953bfd906c3ae7a78a: Status 404 returned error can't find the container with id 2c270aa1de4294dabfe6c22c857f126a72b35dbf797991953bfd906c3ae7a78a Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.425934 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-cbzwp" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.430112 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nbs85\" (UniqueName: \"kubernetes.io/projected/c68fa880-a66b-4c4f-9975-c66ac4ae4767-kube-api-access-nbs85\") pod \"cluster-samples-operator-665b6dd947-4hfxg\" (UID: \"c68fa880-a66b-4c4f-9975-c66ac4ae4767\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4hfxg" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.454392 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xx9r5\" (UniqueName: \"kubernetes.io/projected/8bd7cdf9-9085-4702-8a95-f3f445783066-kube-api-access-xx9r5\") pod \"controller-manager-879f6c89f-kn6mg\" (UID: \"8bd7cdf9-9085-4702-8a95-f3f445783066\") " pod="openshift-controller-manager/controller-manager-879f6c89f-kn6mg" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.470576 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-gj9w7"] Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.470938 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.491385 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.511733 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.513494 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4hfxg" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.529504 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qf5jp" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.532109 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.534083 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4jxjd\" (UniqueName: \"kubernetes.io/projected/409596b6-1fa0-416d-b5a3-a06c2e36c15b-kube-api-access-4jxjd\") pod \"marketplace-operator-79b997595-w9zwt\" (UID: \"409596b6-1fa0-416d-b5a3-a06c2e36c15b\") " pod="openshift-marketplace/marketplace-operator-79b997595-w9zwt" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.534124 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/409596b6-1fa0-416d-b5a3-a06c2e36c15b-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-w9zwt\" (UID: \"409596b6-1fa0-416d-b5a3-a06c2e36c15b\") " pod="openshift-marketplace/marketplace-operator-79b997595-w9zwt" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.534150 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/097c1e37-dbfb-4e31-9c4d-561c6bed9933-registry-certificates\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.534180 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6zgk\" (UniqueName: \"kubernetes.io/projected/3857a937-d62e-4f66-b53c-7e466a6d5bff-kube-api-access-s6zgk\") pod \"authentication-operator-69f744f599-p7dcz\" (UID: \"3857a937-d62e-4f66-b53c-7e466a6d5bff\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-p7dcz" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.534224 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z7278\" (UniqueName: \"kubernetes.io/projected/097c1e37-dbfb-4e31-9c4d-561c6bed9933-kube-api-access-z7278\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.534245 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8sjd\" (UniqueName: \"kubernetes.io/projected/91289879-25be-460a-9639-cddfd77cd942-kube-api-access-z8sjd\") pod \"service-ca-operator-777779d784-8hq8w\" (UID: \"91289879-25be-460a-9639-cddfd77cd942\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-8hq8w" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.534288 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.534326 4791 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/097c1e37-dbfb-4e31-9c4d-561c6bed9933-installation-pull-secrets\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.534344 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3857a937-d62e-4f66-b53c-7e466a6d5bff-serving-cert\") pod \"authentication-operator-69f744f599-p7dcz\" (UID: \"3857a937-d62e-4f66-b53c-7e466a6d5bff\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-p7dcz" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.534373 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3857a937-d62e-4f66-b53c-7e466a6d5bff-service-ca-bundle\") pod \"authentication-operator-69f744f599-p7dcz\" (UID: \"3857a937-d62e-4f66-b53c-7e466a6d5bff\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-p7dcz" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.534400 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3857a937-d62e-4f66-b53c-7e466a6d5bff-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-p7dcz\" (UID: \"3857a937-d62e-4f66-b53c-7e466a6d5bff\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-p7dcz" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.534458 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/91289879-25be-460a-9639-cddfd77cd942-serving-cert\") pod \"service-ca-operator-777779d784-8hq8w\" (UID: \"91289879-25be-460a-9639-cddfd77cd942\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-8hq8w" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.534494 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/409596b6-1fa0-416d-b5a3-a06c2e36c15b-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-w9zwt\" (UID: \"409596b6-1fa0-416d-b5a3-a06c2e36c15b\") " pod="openshift-marketplace/marketplace-operator-79b997595-w9zwt" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.534568 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/097c1e37-dbfb-4e31-9c4d-561c6bed9933-bound-sa-token\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.534623 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/097c1e37-dbfb-4e31-9c4d-561c6bed9933-trusted-ca\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.534647 4791 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/097c1e37-dbfb-4e31-9c4d-561c6bed9933-ca-trust-extracted\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.534669 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/91289879-25be-460a-9639-cddfd77cd942-config\") pod \"service-ca-operator-777779d784-8hq8w\" (UID: \"91289879-25be-460a-9639-cddfd77cd942\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-8hq8w" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.534700 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/097c1e37-dbfb-4e31-9c4d-561c6bed9933-registry-tls\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.534759 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3857a937-d62e-4f66-b53c-7e466a6d5bff-config\") pod \"authentication-operator-69f744f599-p7dcz\" (UID: \"3857a937-d62e-4f66-b53c-7e466a6d5bff\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-p7dcz" Dec 08 21:19:19 crc kubenswrapper[4791]: E1208 21:19:19.535983 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:20.03596228 +0000 UTC m=+36.734720625 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.552025 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.571275 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.595757 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.599266 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.599329 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.599254 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2nxp5" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.599347 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.610199 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.615057 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-vck6k" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.631719 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.635669 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.636485 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/097c1e37-dbfb-4e31-9c4d-561c6bed9933-registry-tls\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.636525 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/665b13a1-0c0e-4249-b7ec-f862eaeb3aea-config\") pod \"etcd-operator-b45778765-6fncw\" (UID: \"665b13a1-0c0e-4249-b7ec-f862eaeb3aea\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6fncw" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.636551 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xdrk4\" (UniqueName: \"kubernetes.io/projected/12b03a53-7257-43c2-98b5-4fba9fa582cb-kube-api-access-xdrk4\") pod \"migrator-59844c95c7-jxmvh\" (UID: \"12b03a53-7257-43c2-98b5-4fba9fa582cb\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-jxmvh" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.636576 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8mwb2\" (UniqueName: \"kubernetes.io/projected/839bdb0e-d6c5-4464-8e23-ee63845cf40f-kube-api-access-8mwb2\") pod \"control-plane-machine-set-operator-78cbb6b69f-68nlk\" (UID: \"839bdb0e-d6c5-4464-8e23-ee63845cf40f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-68nlk" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.636623 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/565d3927-29fc-4e86-8c1b-552a14386bc0-metrics-tls\") pod \"dns-default-7f84n\" (UID: 
\"565d3927-29fc-4e86-8c1b-552a14386bc0\") " pod="openshift-dns/dns-default-7f84n" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.636662 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/839bdb0e-d6c5-4464-8e23-ee63845cf40f-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-68nlk\" (UID: \"839bdb0e-d6c5-4464-8e23-ee63845cf40f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-68nlk" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.636692 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ee523989-5472-461e-8673-c5e80f5216e1-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-sh5nr\" (UID: \"ee523989-5472-461e-8673-c5e80f5216e1\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-sh5nr" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.636868 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cd3251b3-7787-4b6f-bcad-dc0738414547-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-4f4sg\" (UID: \"cd3251b3-7787-4b6f-bcad-dc0738414547\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4f4sg" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.636899 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8eec1570-5f40-4b0d-9a0f-04e381eac889-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-d6h2c\" (UID: \"8eec1570-5f40-4b0d-9a0f-04e381eac889\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-d6h2c" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.636945 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4jxjd\" (UniqueName: \"kubernetes.io/projected/409596b6-1fa0-416d-b5a3-a06c2e36c15b-kube-api-access-4jxjd\") pod \"marketplace-operator-79b997595-w9zwt\" (UID: \"409596b6-1fa0-416d-b5a3-a06c2e36c15b\") " pod="openshift-marketplace/marketplace-operator-79b997595-w9zwt" Dec 08 21:19:19 crc kubenswrapper[4791]: E1208 21:19:19.637072 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:20.137049503 +0000 UTC m=+36.835807848 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.644600 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x2xbl\" (UniqueName: \"kubernetes.io/projected/97356cb4-53ea-4094-a7ac-28cde046b53c-kube-api-access-x2xbl\") pod \"openshift-controller-manager-operator-756b6f6bc6-gt8l7\" (UID: \"97356cb4-53ea-4094-a7ac-28cde046b53c\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gt8l7" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.650578 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x24cp\" (UniqueName: \"kubernetes.io/projected/1789d3a9-3a3a-421c-ad72-cb08f45afa70-kube-api-access-x24cp\") pod \"packageserver-d55dfcdfc-gnc4f\" (UID: \"1789d3a9-3a3a-421c-ad72-cb08f45afa70\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gnc4f" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.650623 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/409596b6-1fa0-416d-b5a3-a06c2e36c15b-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-w9zwt\" (UID: \"409596b6-1fa0-416d-b5a3-a06c2e36c15b\") " pod="openshift-marketplace/marketplace-operator-79b997595-w9zwt" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.650649 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/097c1e37-dbfb-4e31-9c4d-561c6bed9933-registry-certificates\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.650668 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dqr94\" (UniqueName: \"kubernetes.io/projected/565d3927-29fc-4e86-8c1b-552a14386bc0-kube-api-access-dqr94\") pod \"dns-default-7f84n\" (UID: \"565d3927-29fc-4e86-8c1b-552a14386bc0\") " pod="openshift-dns/dns-default-7f84n" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.650693 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/001d07d1-9cd0-4c97-bb65-29457e205813-images\") pod \"machine-config-operator-74547568cd-pm6zw\" (UID: \"001d07d1-9cd0-4c97-bb65-29457e205813\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pm6zw" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.650729 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ec69aeaf-ae67-4d6f-8342-4d3e84db8f63-metrics-tls\") pod \"dns-operator-744455d44c-rdpmz\" (UID: \"ec69aeaf-ae67-4d6f-8342-4d3e84db8f63\") " pod="openshift-dns-operator/dns-operator-744455d44c-rdpmz" Dec 08 21:19:19 crc 
kubenswrapper[4791]: I1208 21:19:19.650764 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8sjd\" (UniqueName: \"kubernetes.io/projected/91289879-25be-460a-9639-cddfd77cd942-kube-api-access-z8sjd\") pod \"service-ca-operator-777779d784-8hq8w\" (UID: \"91289879-25be-460a-9639-cddfd77cd942\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-8hq8w" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.650786 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/0de53f0b-4ad4-43c0-bc9b-cfbbd5cc0c2c-signing-cabundle\") pod \"service-ca-9c57cc56f-5tc82\" (UID: \"0de53f0b-4ad4-43c0-bc9b-cfbbd5cc0c2c\") " pod="openshift-service-ca/service-ca-9c57cc56f-5tc82" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.650809 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vfxbk\" (UniqueName: \"kubernetes.io/projected/527bb8ce-24f8-4bcf-a100-457e11dac79d-kube-api-access-vfxbk\") pod \"collect-profiles-29420475-jq6vt\" (UID: \"527bb8ce-24f8-4bcf-a100-457e11dac79d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420475-jq6vt" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.650835 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1789d3a9-3a3a-421c-ad72-cb08f45afa70-apiservice-cert\") pod \"packageserver-d55dfcdfc-gnc4f\" (UID: \"1789d3a9-3a3a-421c-ad72-cb08f45afa70\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gnc4f" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.650882 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/8256b973-cb50-4848-aabf-109537321b94-mountpoint-dir\") pod \"csi-hostpathplugin-rnxcp\" (UID: \"8256b973-cb50-4848-aabf-109537321b94\") " pod="hostpath-provisioner/csi-hostpathplugin-rnxcp" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.650920 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.650390 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-cbzwp"] Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.649101 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/097c1e37-dbfb-4e31-9c4d-561c6bed9933-registry-tls\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:19 crc kubenswrapper[4791]: E1208 21:19:19.651547 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:20.151524233 +0000 UTC m=+36.850282568 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.652651 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/097c1e37-dbfb-4e31-9c4d-561c6bed9933-registry-certificates\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.654398 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/409596b6-1fa0-416d-b5a3-a06c2e36c15b-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-w9zwt\" (UID: \"409596b6-1fa0-416d-b5a3-a06c2e36c15b\") " pod="openshift-marketplace/marketplace-operator-79b997595-w9zwt" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.660598 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.660936 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3857a937-d62e-4f66-b53c-7e466a6d5bff-serving-cert\") pod \"authentication-operator-69f744f599-p7dcz\" (UID: \"3857a937-d62e-4f66-b53c-7e466a6d5bff\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-p7dcz" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.661055 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3857a937-d62e-4f66-b53c-7e466a6d5bff-service-ca-bundle\") pod \"authentication-operator-69f744f599-p7dcz\" (UID: \"3857a937-d62e-4f66-b53c-7e466a6d5bff\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-p7dcz" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.661090 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7sbfk\" (UniqueName: \"kubernetes.io/projected/fb7a4850-1fbe-4d61-8594-7527ef7b28b9-kube-api-access-7sbfk\") pod \"machine-config-server-t5m7x\" (UID: \"fb7a4850-1fbe-4d61-8594-7527ef7b28b9\") " pod="openshift-machine-config-operator/machine-config-server-t5m7x" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.661132 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/97356cb4-53ea-4094-a7ac-28cde046b53c-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-gt8l7\" (UID: \"97356cb4-53ea-4094-a7ac-28cde046b53c\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gt8l7" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.661164 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/001d07d1-9cd0-4c97-bb65-29457e205813-proxy-tls\") 
pod \"machine-config-operator-74547568cd-pm6zw\" (UID: \"001d07d1-9cd0-4c97-bb65-29457e205813\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pm6zw" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.661184 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e41f56a-953a-4454-863e-566cf339ab08-config\") pod \"openshift-apiserver-operator-796bbdcf4f-ktmqq\" (UID: \"7e41f56a-953a-4454-863e-566cf339ab08\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ktmqq" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.661201 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/fb7a4850-1fbe-4d61-8594-7527ef7b28b9-node-bootstrap-token\") pod \"machine-config-server-t5m7x\" (UID: \"fb7a4850-1fbe-4d61-8594-7527ef7b28b9\") " pod="openshift-machine-config-operator/machine-config-server-t5m7x" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.662882 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dtmnm\" (UniqueName: \"kubernetes.io/projected/ee523989-5472-461e-8673-c5e80f5216e1-kube-api-access-dtmnm\") pod \"multus-admission-controller-857f4d67dd-sh5nr\" (UID: \"ee523989-5472-461e-8673-c5e80f5216e1\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-sh5nr" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.663699 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/97356cb4-53ea-4094-a7ac-28cde046b53c-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-gt8l7\" (UID: \"97356cb4-53ea-4094-a7ac-28cde046b53c\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gt8l7" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.663754 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e9bbe11b-6bc7-4bb3-89cc-178d572202e1-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-rdmr7\" (UID: \"e9bbe11b-6bc7-4bb3-89cc-178d572202e1\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rdmr7" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.663784 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd3251b3-7787-4b6f-bcad-dc0738414547-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-4f4sg\" (UID: \"cd3251b3-7787-4b6f-bcad-dc0738414547\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4f4sg" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.663827 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd3251b3-7787-4b6f-bcad-dc0738414547-config\") pod \"kube-apiserver-operator-766d6c64bb-4f4sg\" (UID: \"cd3251b3-7787-4b6f-bcad-dc0738414547\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4f4sg" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.663832 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/3857a937-d62e-4f66-b53c-7e466a6d5bff-service-ca-bundle\") pod \"authentication-operator-69f744f599-p7dcz\" (UID: \"3857a937-d62e-4f66-b53c-7e466a6d5bff\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-p7dcz" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.663849 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f7lf5\" (UniqueName: \"kubernetes.io/projected/e9bbe11b-6bc7-4bb3-89cc-178d572202e1-kube-api-access-f7lf5\") pod \"machine-config-controller-84d6567774-rdmr7\" (UID: \"e9bbe11b-6bc7-4bb3-89cc-178d572202e1\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rdmr7" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.663936 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/848a44d0-a0f4-48c2-9e4a-f0a4d3329815-cni-sysctl-allowlist\") pod \"cni-sysctl-allowlist-ds-2kt8c\" (UID: \"848a44d0-a0f4-48c2-9e4a-f0a4d3329815\") " pod="openshift-multus/cni-sysctl-allowlist-ds-2kt8c" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.663986 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kdgkn\" (UniqueName: \"kubernetes.io/projected/665b13a1-0c0e-4249-b7ec-f862eaeb3aea-kube-api-access-kdgkn\") pod \"etcd-operator-b45778765-6fncw\" (UID: \"665b13a1-0c0e-4249-b7ec-f862eaeb3aea\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6fncw" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.664012 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-khl4h\" (UniqueName: \"kubernetes.io/projected/848a44d0-a0f4-48c2-9e4a-f0a4d3329815-kube-api-access-khl4h\") pod \"cni-sysctl-allowlist-ds-2kt8c\" (UID: \"848a44d0-a0f4-48c2-9e4a-f0a4d3329815\") " pod="openshift-multus/cni-sysctl-allowlist-ds-2kt8c" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.664046 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/097c1e37-dbfb-4e31-9c4d-561c6bed9933-bound-sa-token\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.664128 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4a64d896-f396-4347-9e7a-091e9741b884-metrics-certs\") pod \"router-default-5444994796-hs66g\" (UID: \"4a64d896-f396-4347-9e7a-091e9741b884\") " pod="openshift-ingress/router-default-5444994796-hs66g" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.664164 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/097c1e37-dbfb-4e31-9c4d-561c6bed9933-trusted-ca\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.664189 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/527bb8ce-24f8-4bcf-a100-457e11dac79d-secret-volume\") pod 
\"collect-profiles-29420475-jq6vt\" (UID: \"527bb8ce-24f8-4bcf-a100-457e11dac79d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420475-jq6vt" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.664217 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4blm5\" (UniqueName: \"kubernetes.io/projected/6b194866-8f4f-4a5c-bf04-117a87fd1836-kube-api-access-4blm5\") pod \"kube-storage-version-migrator-operator-b67b599dd-7rbn4\" (UID: \"6b194866-8f4f-4a5c-bf04-117a87fd1836\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7rbn4" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.664264 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/91289879-25be-460a-9639-cddfd77cd942-config\") pod \"service-ca-operator-777779d784-8hq8w\" (UID: \"91289879-25be-460a-9639-cddfd77cd942\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-8hq8w" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.664290 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/4a64d896-f396-4347-9e7a-091e9741b884-stats-auth\") pod \"router-default-5444994796-hs66g\" (UID: \"4a64d896-f396-4347-9e7a-091e9741b884\") " pod="openshift-ingress/router-default-5444994796-hs66g" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.664339 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3857a937-d62e-4f66-b53c-7e466a6d5bff-config\") pod \"authentication-operator-69f744f599-p7dcz\" (UID: \"3857a937-d62e-4f66-b53c-7e466a6d5bff\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-p7dcz" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.664367 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/8256b973-cb50-4848-aabf-109537321b94-socket-dir\") pod \"csi-hostpathplugin-rnxcp\" (UID: \"8256b973-cb50-4848-aabf-109537321b94\") " pod="hostpath-provisioner/csi-hostpathplugin-rnxcp" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.664414 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k2lkn\" (UniqueName: \"kubernetes.io/projected/8256b973-cb50-4848-aabf-109537321b94-kube-api-access-k2lkn\") pod \"csi-hostpathplugin-rnxcp\" (UID: \"8256b973-cb50-4848-aabf-109537321b94\") " pod="hostpath-provisioner/csi-hostpathplugin-rnxcp" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.664439 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z4q88\" (UniqueName: \"kubernetes.io/projected/549eb9c0-fe38-43ee-a589-daa378a20d48-kube-api-access-z4q88\") pod \"catalog-operator-68c6474976-zp8m4\" (UID: \"549eb9c0-fe38-43ee-a589-daa378a20d48\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zp8m4" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.664673 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4a64d896-f396-4347-9e7a-091e9741b884-service-ca-bundle\") pod \"router-default-5444994796-hs66g\" (UID: 
\"4a64d896-f396-4347-9e7a-091e9741b884\") " pod="openshift-ingress/router-default-5444994796-hs66g" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.665049 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e1c4e40d-0ac4-4a7f-9803-4be4a15fdbf7-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-q8wd6\" (UID: \"e1c4e40d-0ac4-4a7f-9803-4be4a15fdbf7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-q8wd6" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.665096 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8eec1570-5f40-4b0d-9a0f-04e381eac889-config\") pod \"kube-controller-manager-operator-78b949d7b-d6h2c\" (UID: \"8eec1570-5f40-4b0d-9a0f-04e381eac889\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-d6h2c" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.665139 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/e1b46aaa-47dd-462d-835a-e688e19f4fca-profile-collector-cert\") pod \"olm-operator-6b444d44fb-rvvfm\" (UID: \"e1b46aaa-47dd-462d-835a-e688e19f4fca\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rvvfm" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.665168 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/527bb8ce-24f8-4bcf-a100-457e11dac79d-config-volume\") pod \"collect-profiles-29420475-jq6vt\" (UID: \"527bb8ce-24f8-4bcf-a100-457e11dac79d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420475-jq6vt" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.665194 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e9bbe11b-6bc7-4bb3-89cc-178d572202e1-proxy-tls\") pod \"machine-config-controller-84d6567774-rdmr7\" (UID: \"e9bbe11b-6bc7-4bb3-89cc-178d572202e1\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rdmr7" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.665223 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/7d3897ba-b8f0-4eea-948f-74af9801350b-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-7dk77\" (UID: \"7d3897ba-b8f0-4eea-948f-74af9801350b\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7dk77" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.665278 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s6zgk\" (UniqueName: \"kubernetes.io/projected/3857a937-d62e-4f66-b53c-7e466a6d5bff-kube-api-access-s6zgk\") pod \"authentication-operator-69f744f599-p7dcz\" (UID: \"3857a937-d62e-4f66-b53c-7e466a6d5bff\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-p7dcz" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.665306 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: 
\"kubernetes.io/host-path/8256b973-cb50-4848-aabf-109537321b94-plugins-dir\") pod \"csi-hostpathplugin-rnxcp\" (UID: \"8256b973-cb50-4848-aabf-109537321b94\") " pod="hostpath-provisioner/csi-hostpathplugin-rnxcp" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.665362 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z7278\" (UniqueName: \"kubernetes.io/projected/097c1e37-dbfb-4e31-9c4d-561c6bed9933-kube-api-access-z7278\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.665390 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/8256b973-cb50-4848-aabf-109537321b94-registration-dir\") pod \"csi-hostpathplugin-rnxcp\" (UID: \"8256b973-cb50-4848-aabf-109537321b94\") " pod="hostpath-provisioner/csi-hostpathplugin-rnxcp" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.665417 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1789d3a9-3a3a-421c-ad72-cb08f45afa70-webhook-cert\") pod \"packageserver-d55dfcdfc-gnc4f\" (UID: \"1789d3a9-3a3a-421c-ad72-cb08f45afa70\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gnc4f" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.665445 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/fb7a4850-1fbe-4d61-8594-7527ef7b28b9-certs\") pod \"machine-config-server-t5m7x\" (UID: \"fb7a4850-1fbe-4d61-8594-7527ef7b28b9\") " pod="openshift-machine-config-operator/machine-config-server-t5m7x" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.665502 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6b194866-8f4f-4a5c-bf04-117a87fd1836-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-7rbn4\" (UID: \"6b194866-8f4f-4a5c-bf04-117a87fd1836\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7rbn4" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.665530 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5mxlf\" (UniqueName: \"kubernetes.io/projected/7f42a609-f815-4f83-b09b-cf94f54e6581-kube-api-access-5mxlf\") pod \"ingress-canary-xpmb4\" (UID: \"7f42a609-f815-4f83-b09b-cf94f54e6581\") " pod="openshift-ingress-canary/ingress-canary-xpmb4" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.665564 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/565d3927-29fc-4e86-8c1b-552a14386bc0-config-volume\") pod \"dns-default-7f84n\" (UID: \"565d3927-29fc-4e86-8c1b-552a14386bc0\") " pod="openshift-dns/dns-default-7f84n" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.665593 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/097c1e37-dbfb-4e31-9c4d-561c6bed9933-installation-pull-secrets\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.665637 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/665b13a1-0c0e-4249-b7ec-f862eaeb3aea-etcd-client\") pod \"etcd-operator-b45778765-6fncw\" (UID: \"665b13a1-0c0e-4249-b7ec-f862eaeb3aea\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6fncw" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.665661 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/665b13a1-0c0e-4249-b7ec-f862eaeb3aea-etcd-ca\") pod \"etcd-operator-b45778765-6fncw\" (UID: \"665b13a1-0c0e-4249-b7ec-f862eaeb3aea\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6fncw" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.665687 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8eec1570-5f40-4b0d-9a0f-04e381eac889-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-d6h2c\" (UID: \"8eec1570-5f40-4b0d-9a0f-04e381eac889\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-d6h2c" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.665733 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/e1b46aaa-47dd-462d-835a-e688e19f4fca-srv-cert\") pod \"olm-operator-6b444d44fb-rvvfm\" (UID: \"e1b46aaa-47dd-462d-835a-e688e19f4fca\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rvvfm" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.665762 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ready\" (UniqueName: \"kubernetes.io/empty-dir/848a44d0-a0f4-48c2-9e4a-f0a4d3329815-ready\") pod \"cni-sysctl-allowlist-ds-2kt8c\" (UID: \"848a44d0-a0f4-48c2-9e4a-f0a4d3329815\") " pod="openshift-multus/cni-sysctl-allowlist-ds-2kt8c" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.665792 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jmfc9\" (UniqueName: \"kubernetes.io/projected/0de53f0b-4ad4-43c0-bc9b-cfbbd5cc0c2c-kube-api-access-jmfc9\") pod \"service-ca-9c57cc56f-5tc82\" (UID: \"0de53f0b-4ad4-43c0-bc9b-cfbbd5cc0c2c\") " pod="openshift-service-ca/service-ca-9c57cc56f-5tc82" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.665820 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3857a937-d62e-4f66-b53c-7e466a6d5bff-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-p7dcz\" (UID: \"3857a937-d62e-4f66-b53c-7e466a6d5bff\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-p7dcz" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.665851 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/549eb9c0-fe38-43ee-a589-daa378a20d48-profile-collector-cert\") pod \"catalog-operator-68c6474976-zp8m4\" (UID: \"549eb9c0-fe38-43ee-a589-daa378a20d48\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zp8m4" Dec 08 21:19:19 crc kubenswrapper[4791]: 
I1208 21:19:19.665892 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7e41f56a-953a-4454-863e-566cf339ab08-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-ktmqq\" (UID: \"7e41f56a-953a-4454-863e-566cf339ab08\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ktmqq" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.665926 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/91289879-25be-460a-9639-cddfd77cd942-serving-cert\") pod \"service-ca-operator-777779d784-8hq8w\" (UID: \"91289879-25be-460a-9639-cddfd77cd942\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-8hq8w" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.665961 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6b194866-8f4f-4a5c-bf04-117a87fd1836-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-7rbn4\" (UID: \"6b194866-8f4f-4a5c-bf04-117a87fd1836\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7rbn4" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.666834 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3857a937-d62e-4f66-b53c-7e466a6d5bff-serving-cert\") pod \"authentication-operator-69f744f599-p7dcz\" (UID: \"3857a937-d62e-4f66-b53c-7e466a6d5bff\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-p7dcz" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.667829 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/665b13a1-0c0e-4249-b7ec-f862eaeb3aea-serving-cert\") pod \"etcd-operator-b45778765-6fncw\" (UID: \"665b13a1-0c0e-4249-b7ec-f862eaeb3aea\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6fncw" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.667863 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7f42a609-f815-4f83-b09b-cf94f54e6581-cert\") pod \"ingress-canary-xpmb4\" (UID: \"7f42a609-f815-4f83-b09b-cf94f54e6581\") " pod="openshift-ingress-canary/ingress-canary-xpmb4" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.667887 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/001d07d1-9cd0-4c97-bb65-29457e205813-auth-proxy-config\") pod \"machine-config-operator-74547568cd-pm6zw\" (UID: \"001d07d1-9cd0-4c97-bb65-29457e205813\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pm6zw" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.667925 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/409596b6-1fa0-416d-b5a3-a06c2e36c15b-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-w9zwt\" (UID: \"409596b6-1fa0-416d-b5a3-a06c2e36c15b\") " pod="openshift-marketplace/marketplace-operator-79b997595-w9zwt" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.667943 4791 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/0de53f0b-4ad4-43c0-bc9b-cfbbd5cc0c2c-signing-key\") pod \"service-ca-9c57cc56f-5tc82\" (UID: \"0de53f0b-4ad4-43c0-bc9b-cfbbd5cc0c2c\") " pod="openshift-service-ca/service-ca-9c57cc56f-5tc82" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.667963 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cjklc\" (UniqueName: \"kubernetes.io/projected/e1b46aaa-47dd-462d-835a-e688e19f4fca-kube-api-access-cjklc\") pod \"olm-operator-6b444d44fb-rvvfm\" (UID: \"e1b46aaa-47dd-462d-835a-e688e19f4fca\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rvvfm" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.668050 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8tnhh\" (UniqueName: \"kubernetes.io/projected/74d9b7de-2712-4320-b41a-1e5c91bd36e7-kube-api-access-8tnhh\") pod \"downloads-7954f5f757-wl6d7\" (UID: \"74d9b7de-2712-4320-b41a-1e5c91bd36e7\") " pod="openshift-console/downloads-7954f5f757-wl6d7" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.668091 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/549eb9c0-fe38-43ee-a589-daa378a20d48-srv-cert\") pod \"catalog-operator-68c6474976-zp8m4\" (UID: \"549eb9c0-fe38-43ee-a589-daa378a20d48\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zp8m4" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.668112 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vbxf8\" (UniqueName: \"kubernetes.io/projected/7d3897ba-b8f0-4eea-948f-74af9801350b-kube-api-access-vbxf8\") pod \"package-server-manager-789f6589d5-7dk77\" (UID: \"7d3897ba-b8f0-4eea-948f-74af9801350b\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7dk77" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.668135 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/1789d3a9-3a3a-421c-ad72-cb08f45afa70-tmpfs\") pod \"packageserver-d55dfcdfc-gnc4f\" (UID: \"1789d3a9-3a3a-421c-ad72-cb08f45afa70\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gnc4f" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.668169 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1c4e40d-0ac4-4a7f-9803-4be4a15fdbf7-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-q8wd6\" (UID: \"e1c4e40d-0ac4-4a7f-9803-4be4a15fdbf7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-q8wd6" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.668202 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/8256b973-cb50-4848-aabf-109537321b94-csi-data-dir\") pod \"csi-hostpathplugin-rnxcp\" (UID: \"8256b973-cb50-4848-aabf-109537321b94\") " pod="hostpath-provisioner/csi-hostpathplugin-rnxcp" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.668220 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/665b13a1-0c0e-4249-b7ec-f862eaeb3aea-etcd-service-ca\") pod \"etcd-operator-b45778765-6fncw\" (UID: \"665b13a1-0c0e-4249-b7ec-f862eaeb3aea\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6fncw" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.668238 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/4a64d896-f396-4347-9e7a-091e9741b884-default-certificate\") pod \"router-default-5444994796-hs66g\" (UID: \"4a64d896-f396-4347-9e7a-091e9741b884\") " pod="openshift-ingress/router-default-5444994796-hs66g" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.668259 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tgnn4\" (UniqueName: \"kubernetes.io/projected/ec69aeaf-ae67-4d6f-8342-4d3e84db8f63-kube-api-access-tgnn4\") pod \"dns-operator-744455d44c-rdpmz\" (UID: \"ec69aeaf-ae67-4d6f-8342-4d3e84db8f63\") " pod="openshift-dns-operator/dns-operator-744455d44c-rdpmz" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.668426 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/097c1e37-dbfb-4e31-9c4d-561c6bed9933-trusted-ca\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.668484 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/848a44d0-a0f4-48c2-9e4a-f0a4d3329815-tuning-conf-dir\") pod \"cni-sysctl-allowlist-ds-2kt8c\" (UID: \"848a44d0-a0f4-48c2-9e4a-f0a4d3329815\") " pod="openshift-multus/cni-sysctl-allowlist-ds-2kt8c" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.668531 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/097c1e37-dbfb-4e31-9c4d-561c6bed9933-ca-trust-extracted\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.668554 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-skbjt\" (UniqueName: \"kubernetes.io/projected/001d07d1-9cd0-4c97-bb65-29457e205813-kube-api-access-skbjt\") pod \"machine-config-operator-74547568cd-pm6zw\" (UID: \"001d07d1-9cd0-4c97-bb65-29457e205813\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pm6zw" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.668808 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v6m8l\" (UniqueName: \"kubernetes.io/projected/7e41f56a-953a-4454-863e-566cf339ab08-kube-api-access-v6m8l\") pod \"openshift-apiserver-operator-796bbdcf4f-ktmqq\" (UID: \"7e41f56a-953a-4454-863e-566cf339ab08\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ktmqq" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.668837 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/e1c4e40d-0ac4-4a7f-9803-4be4a15fdbf7-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-q8wd6\" (UID: \"e1c4e40d-0ac4-4a7f-9803-4be4a15fdbf7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-q8wd6" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.668877 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxqkf\" (UniqueName: \"kubernetes.io/projected/4a64d896-f396-4347-9e7a-091e9741b884-kube-api-access-hxqkf\") pod \"router-default-5444994796-hs66g\" (UID: \"4a64d896-f396-4347-9e7a-091e9741b884\") " pod="openshift-ingress/router-default-5444994796-hs66g" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.669546 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3857a937-d62e-4f66-b53c-7e466a6d5bff-config\") pod \"authentication-operator-69f744f599-p7dcz\" (UID: \"3857a937-d62e-4f66-b53c-7e466a6d5bff\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-p7dcz" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.670808 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3857a937-d62e-4f66-b53c-7e466a6d5bff-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-p7dcz\" (UID: \"3857a937-d62e-4f66-b53c-7e466a6d5bff\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-p7dcz" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.670981 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-kn6mg" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.674642 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/91289879-25be-460a-9639-cddfd77cd942-config\") pod \"service-ca-operator-777779d784-8hq8w\" (UID: \"91289879-25be-460a-9639-cddfd77cd942\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-8hq8w" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.674659 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/097c1e37-dbfb-4e31-9c4d-561c6bed9933-ca-trust-extracted\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.674810 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/91289879-25be-460a-9639-cddfd77cd942-serving-cert\") pod \"service-ca-operator-777779d784-8hq8w\" (UID: \"91289879-25be-460a-9639-cddfd77cd942\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-8hq8w" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.676729 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.677129 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/409596b6-1fa0-416d-b5a3-a06c2e36c15b-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-w9zwt\" (UID: 
\"409596b6-1fa0-416d-b5a3-a06c2e36c15b\") " pod="openshift-marketplace/marketplace-operator-79b997595-w9zwt" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.679430 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/097c1e37-dbfb-4e31-9c4d-561c6bed9933-installation-pull-secrets\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.682003 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-7c86k" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.711027 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.732923 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.751512 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.770337 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.770594 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7sbfk\" (UniqueName: \"kubernetes.io/projected/fb7a4850-1fbe-4d61-8594-7527ef7b28b9-kube-api-access-7sbfk\") pod \"machine-config-server-t5m7x\" (UID: \"fb7a4850-1fbe-4d61-8594-7527ef7b28b9\") " pod="openshift-machine-config-operator/machine-config-server-t5m7x" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.770617 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/97356cb4-53ea-4094-a7ac-28cde046b53c-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-gt8l7\" (UID: \"97356cb4-53ea-4094-a7ac-28cde046b53c\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gt8l7" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.770635 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e41f56a-953a-4454-863e-566cf339ab08-config\") pod \"openshift-apiserver-operator-796bbdcf4f-ktmqq\" (UID: \"7e41f56a-953a-4454-863e-566cf339ab08\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ktmqq" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.770651 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/fb7a4850-1fbe-4d61-8594-7527ef7b28b9-node-bootstrap-token\") pod \"machine-config-server-t5m7x\" (UID: \"fb7a4850-1fbe-4d61-8594-7527ef7b28b9\") " pod="openshift-machine-config-operator/machine-config-server-t5m7x" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.770667 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/001d07d1-9cd0-4c97-bb65-29457e205813-proxy-tls\") pod \"machine-config-operator-74547568cd-pm6zw\" (UID: \"001d07d1-9cd0-4c97-bb65-29457e205813\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pm6zw" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.770684 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dtmnm\" (UniqueName: \"kubernetes.io/projected/ee523989-5472-461e-8673-c5e80f5216e1-kube-api-access-dtmnm\") pod \"multus-admission-controller-857f4d67dd-sh5nr\" (UID: \"ee523989-5472-461e-8673-c5e80f5216e1\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-sh5nr" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.770703 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/97356cb4-53ea-4094-a7ac-28cde046b53c-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-gt8l7\" (UID: \"97356cb4-53ea-4094-a7ac-28cde046b53c\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gt8l7" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.770735 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd3251b3-7787-4b6f-bcad-dc0738414547-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-4f4sg\" (UID: \"cd3251b3-7787-4b6f-bcad-dc0738414547\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4f4sg" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.770751 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e9bbe11b-6bc7-4bb3-89cc-178d572202e1-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-rdmr7\" (UID: \"e9bbe11b-6bc7-4bb3-89cc-178d572202e1\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rdmr7" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.770765 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd3251b3-7787-4b6f-bcad-dc0738414547-config\") pod \"kube-apiserver-operator-766d6c64bb-4f4sg\" (UID: \"cd3251b3-7787-4b6f-bcad-dc0738414547\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4f4sg" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.770780 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f7lf5\" (UniqueName: \"kubernetes.io/projected/e9bbe11b-6bc7-4bb3-89cc-178d572202e1-kube-api-access-f7lf5\") pod \"machine-config-controller-84d6567774-rdmr7\" (UID: \"e9bbe11b-6bc7-4bb3-89cc-178d572202e1\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rdmr7" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.770800 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/848a44d0-a0f4-48c2-9e4a-f0a4d3329815-cni-sysctl-allowlist\") pod \"cni-sysctl-allowlist-ds-2kt8c\" (UID: \"848a44d0-a0f4-48c2-9e4a-f0a4d3329815\") " pod="openshift-multus/cni-sysctl-allowlist-ds-2kt8c" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.770816 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-khl4h\" (UniqueName: 
\"kubernetes.io/projected/848a44d0-a0f4-48c2-9e4a-f0a4d3329815-kube-api-access-khl4h\") pod \"cni-sysctl-allowlist-ds-2kt8c\" (UID: \"848a44d0-a0f4-48c2-9e4a-f0a4d3329815\") " pod="openshift-multus/cni-sysctl-allowlist-ds-2kt8c" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.770839 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kdgkn\" (UniqueName: \"kubernetes.io/projected/665b13a1-0c0e-4249-b7ec-f862eaeb3aea-kube-api-access-kdgkn\") pod \"etcd-operator-b45778765-6fncw\" (UID: \"665b13a1-0c0e-4249-b7ec-f862eaeb3aea\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6fncw" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.770856 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4a64d896-f396-4347-9e7a-091e9741b884-metrics-certs\") pod \"router-default-5444994796-hs66g\" (UID: \"4a64d896-f396-4347-9e7a-091e9741b884\") " pod="openshift-ingress/router-default-5444994796-hs66g" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.770871 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/527bb8ce-24f8-4bcf-a100-457e11dac79d-secret-volume\") pod \"collect-profiles-29420475-jq6vt\" (UID: \"527bb8ce-24f8-4bcf-a100-457e11dac79d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420475-jq6vt" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.770891 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4blm5\" (UniqueName: \"kubernetes.io/projected/6b194866-8f4f-4a5c-bf04-117a87fd1836-kube-api-access-4blm5\") pod \"kube-storage-version-migrator-operator-b67b599dd-7rbn4\" (UID: \"6b194866-8f4f-4a5c-bf04-117a87fd1836\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7rbn4" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.770908 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/4a64d896-f396-4347-9e7a-091e9741b884-stats-auth\") pod \"router-default-5444994796-hs66g\" (UID: \"4a64d896-f396-4347-9e7a-091e9741b884\") " pod="openshift-ingress/router-default-5444994796-hs66g" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.770931 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/8256b973-cb50-4848-aabf-109537321b94-socket-dir\") pod \"csi-hostpathplugin-rnxcp\" (UID: \"8256b973-cb50-4848-aabf-109537321b94\") " pod="hostpath-provisioner/csi-hostpathplugin-rnxcp" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.770954 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z4q88\" (UniqueName: \"kubernetes.io/projected/549eb9c0-fe38-43ee-a589-daa378a20d48-kube-api-access-z4q88\") pod \"catalog-operator-68c6474976-zp8m4\" (UID: \"549eb9c0-fe38-43ee-a589-daa378a20d48\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zp8m4" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.770970 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4a64d896-f396-4347-9e7a-091e9741b884-service-ca-bundle\") pod \"router-default-5444994796-hs66g\" (UID: \"4a64d896-f396-4347-9e7a-091e9741b884\") " 
pod="openshift-ingress/router-default-5444994796-hs66g" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.770986 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k2lkn\" (UniqueName: \"kubernetes.io/projected/8256b973-cb50-4848-aabf-109537321b94-kube-api-access-k2lkn\") pod \"csi-hostpathplugin-rnxcp\" (UID: \"8256b973-cb50-4848-aabf-109537321b94\") " pod="hostpath-provisioner/csi-hostpathplugin-rnxcp" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.771003 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8eec1570-5f40-4b0d-9a0f-04e381eac889-config\") pod \"kube-controller-manager-operator-78b949d7b-d6h2c\" (UID: \"8eec1570-5f40-4b0d-9a0f-04e381eac889\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-d6h2c" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.771019 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e1c4e40d-0ac4-4a7f-9803-4be4a15fdbf7-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-q8wd6\" (UID: \"e1c4e40d-0ac4-4a7f-9803-4be4a15fdbf7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-q8wd6" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.771036 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/e1b46aaa-47dd-462d-835a-e688e19f4fca-profile-collector-cert\") pod \"olm-operator-6b444d44fb-rvvfm\" (UID: \"e1b46aaa-47dd-462d-835a-e688e19f4fca\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rvvfm" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.771051 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e9bbe11b-6bc7-4bb3-89cc-178d572202e1-proxy-tls\") pod \"machine-config-controller-84d6567774-rdmr7\" (UID: \"e9bbe11b-6bc7-4bb3-89cc-178d572202e1\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rdmr7" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.771067 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/7d3897ba-b8f0-4eea-948f-74af9801350b-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-7dk77\" (UID: \"7d3897ba-b8f0-4eea-948f-74af9801350b\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7dk77" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.771083 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/527bb8ce-24f8-4bcf-a100-457e11dac79d-config-volume\") pod \"collect-profiles-29420475-jq6vt\" (UID: \"527bb8ce-24f8-4bcf-a100-457e11dac79d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420475-jq6vt" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.771099 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/8256b973-cb50-4848-aabf-109537321b94-plugins-dir\") pod \"csi-hostpathplugin-rnxcp\" (UID: \"8256b973-cb50-4848-aabf-109537321b94\") " pod="hostpath-provisioner/csi-hostpathplugin-rnxcp" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 
21:19:19.771124 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/8256b973-cb50-4848-aabf-109537321b94-registration-dir\") pod \"csi-hostpathplugin-rnxcp\" (UID: \"8256b973-cb50-4848-aabf-109537321b94\") " pod="hostpath-provisioner/csi-hostpathplugin-rnxcp" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.771141 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1789d3a9-3a3a-421c-ad72-cb08f45afa70-webhook-cert\") pod \"packageserver-d55dfcdfc-gnc4f\" (UID: \"1789d3a9-3a3a-421c-ad72-cb08f45afa70\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gnc4f" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.771157 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/fb7a4850-1fbe-4d61-8594-7527ef7b28b9-certs\") pod \"machine-config-server-t5m7x\" (UID: \"fb7a4850-1fbe-4d61-8594-7527ef7b28b9\") " pod="openshift-machine-config-operator/machine-config-server-t5m7x" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.771174 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6b194866-8f4f-4a5c-bf04-117a87fd1836-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-7rbn4\" (UID: \"6b194866-8f4f-4a5c-bf04-117a87fd1836\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7rbn4" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.771189 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5mxlf\" (UniqueName: \"kubernetes.io/projected/7f42a609-f815-4f83-b09b-cf94f54e6581-kube-api-access-5mxlf\") pod \"ingress-canary-xpmb4\" (UID: \"7f42a609-f815-4f83-b09b-cf94f54e6581\") " pod="openshift-ingress-canary/ingress-canary-xpmb4" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.771226 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/565d3927-29fc-4e86-8c1b-552a14386bc0-config-volume\") pod \"dns-default-7f84n\" (UID: \"565d3927-29fc-4e86-8c1b-552a14386bc0\") " pod="openshift-dns/dns-default-7f84n" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.771243 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/665b13a1-0c0e-4249-b7ec-f862eaeb3aea-etcd-client\") pod \"etcd-operator-b45778765-6fncw\" (UID: \"665b13a1-0c0e-4249-b7ec-f862eaeb3aea\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6fncw" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.771259 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8eec1570-5f40-4b0d-9a0f-04e381eac889-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-d6h2c\" (UID: \"8eec1570-5f40-4b0d-9a0f-04e381eac889\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-d6h2c" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.771273 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/e1b46aaa-47dd-462d-835a-e688e19f4fca-srv-cert\") pod \"olm-operator-6b444d44fb-rvvfm\" (UID: 
\"e1b46aaa-47dd-462d-835a-e688e19f4fca\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rvvfm" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.771289 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ready\" (UniqueName: \"kubernetes.io/empty-dir/848a44d0-a0f4-48c2-9e4a-f0a4d3329815-ready\") pod \"cni-sysctl-allowlist-ds-2kt8c\" (UID: \"848a44d0-a0f4-48c2-9e4a-f0a4d3329815\") " pod="openshift-multus/cni-sysctl-allowlist-ds-2kt8c" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.771306 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jmfc9\" (UniqueName: \"kubernetes.io/projected/0de53f0b-4ad4-43c0-bc9b-cfbbd5cc0c2c-kube-api-access-jmfc9\") pod \"service-ca-9c57cc56f-5tc82\" (UID: \"0de53f0b-4ad4-43c0-bc9b-cfbbd5cc0c2c\") " pod="openshift-service-ca/service-ca-9c57cc56f-5tc82" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.771330 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/665b13a1-0c0e-4249-b7ec-f862eaeb3aea-etcd-ca\") pod \"etcd-operator-b45778765-6fncw\" (UID: \"665b13a1-0c0e-4249-b7ec-f862eaeb3aea\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6fncw" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.771356 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/549eb9c0-fe38-43ee-a589-daa378a20d48-profile-collector-cert\") pod \"catalog-operator-68c6474976-zp8m4\" (UID: \"549eb9c0-fe38-43ee-a589-daa378a20d48\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zp8m4" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.771380 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7e41f56a-953a-4454-863e-566cf339ab08-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-ktmqq\" (UID: \"7e41f56a-953a-4454-863e-566cf339ab08\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ktmqq" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.771402 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6b194866-8f4f-4a5c-bf04-117a87fd1836-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-7rbn4\" (UID: \"6b194866-8f4f-4a5c-bf04-117a87fd1836\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7rbn4" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.771424 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7f42a609-f815-4f83-b09b-cf94f54e6581-cert\") pod \"ingress-canary-xpmb4\" (UID: \"7f42a609-f815-4f83-b09b-cf94f54e6581\") " pod="openshift-ingress-canary/ingress-canary-xpmb4" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.771441 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/001d07d1-9cd0-4c97-bb65-29457e205813-auth-proxy-config\") pod \"machine-config-operator-74547568cd-pm6zw\" (UID: \"001d07d1-9cd0-4c97-bb65-29457e205813\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pm6zw" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.771461 4791 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/665b13a1-0c0e-4249-b7ec-f862eaeb3aea-serving-cert\") pod \"etcd-operator-b45778765-6fncw\" (UID: \"665b13a1-0c0e-4249-b7ec-f862eaeb3aea\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6fncw" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.771484 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cjklc\" (UniqueName: \"kubernetes.io/projected/e1b46aaa-47dd-462d-835a-e688e19f4fca-kube-api-access-cjklc\") pod \"olm-operator-6b444d44fb-rvvfm\" (UID: \"e1b46aaa-47dd-462d-835a-e688e19f4fca\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rvvfm" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.771516 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/0de53f0b-4ad4-43c0-bc9b-cfbbd5cc0c2c-signing-key\") pod \"service-ca-9c57cc56f-5tc82\" (UID: \"0de53f0b-4ad4-43c0-bc9b-cfbbd5cc0c2c\") " pod="openshift-service-ca/service-ca-9c57cc56f-5tc82" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.771534 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8tnhh\" (UniqueName: \"kubernetes.io/projected/74d9b7de-2712-4320-b41a-1e5c91bd36e7-kube-api-access-8tnhh\") pod \"downloads-7954f5f757-wl6d7\" (UID: \"74d9b7de-2712-4320-b41a-1e5c91bd36e7\") " pod="openshift-console/downloads-7954f5f757-wl6d7" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.771550 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/549eb9c0-fe38-43ee-a589-daa378a20d48-srv-cert\") pod \"catalog-operator-68c6474976-zp8m4\" (UID: \"549eb9c0-fe38-43ee-a589-daa378a20d48\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zp8m4" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.771566 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vbxf8\" (UniqueName: \"kubernetes.io/projected/7d3897ba-b8f0-4eea-948f-74af9801350b-kube-api-access-vbxf8\") pod \"package-server-manager-789f6589d5-7dk77\" (UID: \"7d3897ba-b8f0-4eea-948f-74af9801350b\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7dk77" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.771581 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/1789d3a9-3a3a-421c-ad72-cb08f45afa70-tmpfs\") pod \"packageserver-d55dfcdfc-gnc4f\" (UID: \"1789d3a9-3a3a-421c-ad72-cb08f45afa70\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gnc4f" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.771603 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/8256b973-cb50-4848-aabf-109537321b94-csi-data-dir\") pod \"csi-hostpathplugin-rnxcp\" (UID: \"8256b973-cb50-4848-aabf-109537321b94\") " pod="hostpath-provisioner/csi-hostpathplugin-rnxcp" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.771620 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1c4e40d-0ac4-4a7f-9803-4be4a15fdbf7-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-q8wd6\" (UID: \"e1c4e40d-0ac4-4a7f-9803-4be4a15fdbf7\") " 
pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-q8wd6" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.771656 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/4a64d896-f396-4347-9e7a-091e9741b884-default-certificate\") pod \"router-default-5444994796-hs66g\" (UID: \"4a64d896-f396-4347-9e7a-091e9741b884\") " pod="openshift-ingress/router-default-5444994796-hs66g" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.771673 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tgnn4\" (UniqueName: \"kubernetes.io/projected/ec69aeaf-ae67-4d6f-8342-4d3e84db8f63-kube-api-access-tgnn4\") pod \"dns-operator-744455d44c-rdpmz\" (UID: \"ec69aeaf-ae67-4d6f-8342-4d3e84db8f63\") " pod="openshift-dns-operator/dns-operator-744455d44c-rdpmz" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.771688 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/848a44d0-a0f4-48c2-9e4a-f0a4d3329815-tuning-conf-dir\") pod \"cni-sysctl-allowlist-ds-2kt8c\" (UID: \"848a44d0-a0f4-48c2-9e4a-f0a4d3329815\") " pod="openshift-multus/cni-sysctl-allowlist-ds-2kt8c" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.771720 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/665b13a1-0c0e-4249-b7ec-f862eaeb3aea-etcd-service-ca\") pod \"etcd-operator-b45778765-6fncw\" (UID: \"665b13a1-0c0e-4249-b7ec-f862eaeb3aea\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6fncw" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.771736 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-skbjt\" (UniqueName: \"kubernetes.io/projected/001d07d1-9cd0-4c97-bb65-29457e205813-kube-api-access-skbjt\") pod \"machine-config-operator-74547568cd-pm6zw\" (UID: \"001d07d1-9cd0-4c97-bb65-29457e205813\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pm6zw" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.771754 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v6m8l\" (UniqueName: \"kubernetes.io/projected/7e41f56a-953a-4454-863e-566cf339ab08-kube-api-access-v6m8l\") pod \"openshift-apiserver-operator-796bbdcf4f-ktmqq\" (UID: \"7e41f56a-953a-4454-863e-566cf339ab08\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ktmqq" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.771768 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e1c4e40d-0ac4-4a7f-9803-4be4a15fdbf7-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-q8wd6\" (UID: \"e1c4e40d-0ac4-4a7f-9803-4be4a15fdbf7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-q8wd6" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.771785 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hxqkf\" (UniqueName: \"kubernetes.io/projected/4a64d896-f396-4347-9e7a-091e9741b884-kube-api-access-hxqkf\") pod \"router-default-5444994796-hs66g\" (UID: \"4a64d896-f396-4347-9e7a-091e9741b884\") " pod="openshift-ingress/router-default-5444994796-hs66g" Dec 08 21:19:19 crc kubenswrapper[4791]: 
I1208 21:19:19.771800 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/665b13a1-0c0e-4249-b7ec-f862eaeb3aea-config\") pod \"etcd-operator-b45778765-6fncw\" (UID: \"665b13a1-0c0e-4249-b7ec-f862eaeb3aea\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6fncw" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.771824 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xdrk4\" (UniqueName: \"kubernetes.io/projected/12b03a53-7257-43c2-98b5-4fba9fa582cb-kube-api-access-xdrk4\") pod \"migrator-59844c95c7-jxmvh\" (UID: \"12b03a53-7257-43c2-98b5-4fba9fa582cb\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-jxmvh" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.771842 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8mwb2\" (UniqueName: \"kubernetes.io/projected/839bdb0e-d6c5-4464-8e23-ee63845cf40f-kube-api-access-8mwb2\") pod \"control-plane-machine-set-operator-78cbb6b69f-68nlk\" (UID: \"839bdb0e-d6c5-4464-8e23-ee63845cf40f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-68nlk" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.774143 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.774527 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ready\" (UniqueName: \"kubernetes.io/empty-dir/848a44d0-a0f4-48c2-9e4a-f0a4d3329815-ready\") pod \"cni-sysctl-allowlist-ds-2kt8c\" (UID: \"848a44d0-a0f4-48c2-9e4a-f0a4d3329815\") " pod="openshift-multus/cni-sysctl-allowlist-ds-2kt8c" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.774918 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/1789d3a9-3a3a-421c-ad72-cb08f45afa70-tmpfs\") pod \"packageserver-d55dfcdfc-gnc4f\" (UID: \"1789d3a9-3a3a-421c-ad72-cb08f45afa70\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gnc4f" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.775346 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/8256b973-cb50-4848-aabf-109537321b94-csi-data-dir\") pod \"csi-hostpathplugin-rnxcp\" (UID: \"8256b973-cb50-4848-aabf-109537321b94\") " pod="hostpath-provisioner/csi-hostpathplugin-rnxcp" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.775948 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1c4e40d-0ac4-4a7f-9803-4be4a15fdbf7-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-q8wd6\" (UID: \"e1c4e40d-0ac4-4a7f-9803-4be4a15fdbf7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-q8wd6" Dec 08 21:19:19 crc kubenswrapper[4791]: E1208 21:19:19.776600 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:20.276573879 +0000 UTC m=+36.975332424 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.776870 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/848a44d0-a0f4-48c2-9e4a-f0a4d3329815-tuning-conf-dir\") pod \"cni-sysctl-allowlist-ds-2kt8c\" (UID: \"848a44d0-a0f4-48c2-9e4a-f0a4d3329815\") " pod="openshift-multus/cni-sysctl-allowlist-ds-2kt8c" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.778609 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/001d07d1-9cd0-4c97-bb65-29457e205813-auth-proxy-config\") pod \"machine-config-operator-74547568cd-pm6zw\" (UID: \"001d07d1-9cd0-4c97-bb65-29457e205813\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pm6zw" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.779496 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd3251b3-7787-4b6f-bcad-dc0738414547-config\") pod \"kube-apiserver-operator-766d6c64bb-4f4sg\" (UID: \"cd3251b3-7787-4b6f-bcad-dc0738414547\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4f4sg" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.780219 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e9bbe11b-6bc7-4bb3-89cc-178d572202e1-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-rdmr7\" (UID: \"e9bbe11b-6bc7-4bb3-89cc-178d572202e1\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rdmr7" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.780281 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/8256b973-cb50-4848-aabf-109537321b94-socket-dir\") pod \"csi-hostpathplugin-rnxcp\" (UID: \"8256b973-cb50-4848-aabf-109537321b94\") " pod="hostpath-provisioner/csi-hostpathplugin-rnxcp" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.781448 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4a64d896-f396-4347-9e7a-091e9741b884-service-ca-bundle\") pod \"router-default-5444994796-hs66g\" (UID: \"4a64d896-f396-4347-9e7a-091e9741b884\") " pod="openshift-ingress/router-default-5444994796-hs66g" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.782616 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1789d3a9-3a3a-421c-ad72-cb08f45afa70-webhook-cert\") pod \"packageserver-d55dfcdfc-gnc4f\" (UID: \"1789d3a9-3a3a-421c-ad72-cb08f45afa70\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gnc4f" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.783300 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: 
\"kubernetes.io/host-path/8256b973-cb50-4848-aabf-109537321b94-plugins-dir\") pod \"csi-hostpathplugin-rnxcp\" (UID: \"8256b973-cb50-4848-aabf-109537321b94\") " pod="hostpath-provisioner/csi-hostpathplugin-rnxcp" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.783330 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/549eb9c0-fe38-43ee-a589-daa378a20d48-srv-cert\") pod \"catalog-operator-68c6474976-zp8m4\" (UID: \"549eb9c0-fe38-43ee-a589-daa378a20d48\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zp8m4" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.783378 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/8256b973-cb50-4848-aabf-109537321b94-registration-dir\") pod \"csi-hostpathplugin-rnxcp\" (UID: \"8256b973-cb50-4848-aabf-109537321b94\") " pod="hostpath-provisioner/csi-hostpathplugin-rnxcp" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.783457 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/565d3927-29fc-4e86-8c1b-552a14386bc0-metrics-tls\") pod \"dns-default-7f84n\" (UID: \"565d3927-29fc-4e86-8c1b-552a14386bc0\") " pod="openshift-dns/dns-default-7f84n" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.783531 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/839bdb0e-d6c5-4464-8e23-ee63845cf40f-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-68nlk\" (UID: \"839bdb0e-d6c5-4464-8e23-ee63845cf40f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-68nlk" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.783580 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cd3251b3-7787-4b6f-bcad-dc0738414547-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-4f4sg\" (UID: \"cd3251b3-7787-4b6f-bcad-dc0738414547\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4f4sg" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.783602 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ee523989-5472-461e-8673-c5e80f5216e1-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-sh5nr\" (UID: \"ee523989-5472-461e-8673-c5e80f5216e1\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-sh5nr" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.783630 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8eec1570-5f40-4b0d-9a0f-04e381eac889-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-d6h2c\" (UID: \"8eec1570-5f40-4b0d-9a0f-04e381eac889\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-d6h2c" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.783672 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x2xbl\" (UniqueName: \"kubernetes.io/projected/97356cb4-53ea-4094-a7ac-28cde046b53c-kube-api-access-x2xbl\") pod \"openshift-controller-manager-operator-756b6f6bc6-gt8l7\" (UID: 
\"97356cb4-53ea-4094-a7ac-28cde046b53c\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gt8l7" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.783724 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x24cp\" (UniqueName: \"kubernetes.io/projected/1789d3a9-3a3a-421c-ad72-cb08f45afa70-kube-api-access-x24cp\") pod \"packageserver-d55dfcdfc-gnc4f\" (UID: \"1789d3a9-3a3a-421c-ad72-cb08f45afa70\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gnc4f" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.783748 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dqr94\" (UniqueName: \"kubernetes.io/projected/565d3927-29fc-4e86-8c1b-552a14386bc0-kube-api-access-dqr94\") pod \"dns-default-7f84n\" (UID: \"565d3927-29fc-4e86-8c1b-552a14386bc0\") " pod="openshift-dns/dns-default-7f84n" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.783780 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ec69aeaf-ae67-4d6f-8342-4d3e84db8f63-metrics-tls\") pod \"dns-operator-744455d44c-rdpmz\" (UID: \"ec69aeaf-ae67-4d6f-8342-4d3e84db8f63\") " pod="openshift-dns-operator/dns-operator-744455d44c-rdpmz" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.783855 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/0de53f0b-4ad4-43c0-bc9b-cfbbd5cc0c2c-signing-cabundle\") pod \"service-ca-9c57cc56f-5tc82\" (UID: \"0de53f0b-4ad4-43c0-bc9b-cfbbd5cc0c2c\") " pod="openshift-service-ca/service-ca-9c57cc56f-5tc82" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.783869 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd3251b3-7787-4b6f-bcad-dc0738414547-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-4f4sg\" (UID: \"cd3251b3-7787-4b6f-bcad-dc0738414547\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4f4sg" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.784012 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/001d07d1-9cd0-4c97-bb65-29457e205813-images\") pod \"machine-config-operator-74547568cd-pm6zw\" (UID: \"001d07d1-9cd0-4c97-bb65-29457e205813\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pm6zw" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.783526 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/001d07d1-9cd0-4c97-bb65-29457e205813-proxy-tls\") pod \"machine-config-operator-74547568cd-pm6zw\" (UID: \"001d07d1-9cd0-4c97-bb65-29457e205813\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pm6zw" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.784037 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vfxbk\" (UniqueName: \"kubernetes.io/projected/527bb8ce-24f8-4bcf-a100-457e11dac79d-kube-api-access-vfxbk\") pod \"collect-profiles-29420475-jq6vt\" (UID: \"527bb8ce-24f8-4bcf-a100-457e11dac79d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420475-jq6vt" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.784689 4791 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1789d3a9-3a3a-421c-ad72-cb08f45afa70-apiservice-cert\") pod \"packageserver-d55dfcdfc-gnc4f\" (UID: \"1789d3a9-3a3a-421c-ad72-cb08f45afa70\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gnc4f" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.784752 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/8256b973-cb50-4848-aabf-109537321b94-mountpoint-dir\") pod \"csi-hostpathplugin-rnxcp\" (UID: \"8256b973-cb50-4848-aabf-109537321b94\") " pod="hostpath-provisioner/csi-hostpathplugin-rnxcp" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.784917 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/001d07d1-9cd0-4c97-bb65-29457e205813-images\") pod \"machine-config-operator-74547568cd-pm6zw\" (UID: \"001d07d1-9cd0-4c97-bb65-29457e205813\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pm6zw" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.784989 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/8256b973-cb50-4848-aabf-109537321b94-mountpoint-dir\") pod \"csi-hostpathplugin-rnxcp\" (UID: \"8256b973-cb50-4848-aabf-109537321b94\") " pod="hostpath-provisioner/csi-hostpathplugin-rnxcp" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.785161 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/0de53f0b-4ad4-43c0-bc9b-cfbbd5cc0c2c-signing-cabundle\") pod \"service-ca-9c57cc56f-5tc82\" (UID: \"0de53f0b-4ad4-43c0-bc9b-cfbbd5cc0c2c\") " pod="openshift-service-ca/service-ca-9c57cc56f-5tc82" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.790112 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/e1b46aaa-47dd-462d-835a-e688e19f4fca-srv-cert\") pod \"olm-operator-6b444d44fb-rvvfm\" (UID: \"e1b46aaa-47dd-462d-835a-e688e19f4fca\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rvvfm" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.793092 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/4a64d896-f396-4347-9e7a-091e9741b884-default-certificate\") pod \"router-default-5444994796-hs66g\" (UID: \"4a64d896-f396-4347-9e7a-091e9741b884\") " pod="openshift-ingress/router-default-5444994796-hs66g" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.796778 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.799233 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/e1b46aaa-47dd-462d-835a-e688e19f4fca-profile-collector-cert\") pod \"olm-operator-6b444d44fb-rvvfm\" (UID: \"e1b46aaa-47dd-462d-835a-e688e19f4fca\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rvvfm" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.801059 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: 
\"kubernetes.io/secret/549eb9c0-fe38-43ee-a589-daa378a20d48-profile-collector-cert\") pod \"catalog-operator-68c6474976-zp8m4\" (UID: \"549eb9c0-fe38-43ee-a589-daa378a20d48\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zp8m4" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.801638 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/7d3897ba-b8f0-4eea-948f-74af9801350b-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-7dk77\" (UID: \"7d3897ba-b8f0-4eea-948f-74af9801350b\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7dk77" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.803986 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/4a64d896-f396-4347-9e7a-091e9741b884-stats-auth\") pod \"router-default-5444994796-hs66g\" (UID: \"4a64d896-f396-4347-9e7a-091e9741b884\") " pod="openshift-ingress/router-default-5444994796-hs66g" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.804701 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/527bb8ce-24f8-4bcf-a100-457e11dac79d-secret-volume\") pod \"collect-profiles-29420475-jq6vt\" (UID: \"527bb8ce-24f8-4bcf-a100-457e11dac79d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420475-jq6vt" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.805301 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/97356cb4-53ea-4094-a7ac-28cde046b53c-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-gt8l7\" (UID: \"97356cb4-53ea-4094-a7ac-28cde046b53c\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gt8l7" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.807427 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1789d3a9-3a3a-421c-ad72-cb08f45afa70-apiservice-cert\") pod \"packageserver-d55dfcdfc-gnc4f\" (UID: \"1789d3a9-3a3a-421c-ad72-cb08f45afa70\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gnc4f" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.808319 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4hfxg"] Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.808737 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e9bbe11b-6bc7-4bb3-89cc-178d572202e1-proxy-tls\") pod \"machine-config-controller-84d6567774-rdmr7\" (UID: \"e9bbe11b-6bc7-4bb3-89cc-178d572202e1\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rdmr7" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.808842 4791 request.go:700] Waited for 1.005473524s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager-operator/configmaps?fieldSelector=metadata.name%3Dkube-root-ca.crt&limit=500&resourceVersion=0 Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.809634 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: 
\"kubernetes.io/secret/0de53f0b-4ad4-43c0-bc9b-cfbbd5cc0c2c-signing-key\") pod \"service-ca-9c57cc56f-5tc82\" (UID: \"0de53f0b-4ad4-43c0-bc9b-cfbbd5cc0c2c\") " pod="openshift-service-ca/service-ca-9c57cc56f-5tc82" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.811284 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.822942 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e1c4e40d-0ac4-4a7f-9803-4be4a15fdbf7-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-q8wd6\" (UID: \"e1c4e40d-0ac4-4a7f-9803-4be4a15fdbf7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-q8wd6" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.826274 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4a64d896-f396-4347-9e7a-091e9741b884-metrics-certs\") pod \"router-default-5444994796-hs66g\" (UID: \"4a64d896-f396-4347-9e7a-091e9741b884\") " pod="openshift-ingress/router-default-5444994796-hs66g" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.829198 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ee523989-5472-461e-8673-c5e80f5216e1-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-sh5nr\" (UID: \"ee523989-5472-461e-8673-c5e80f5216e1\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-sh5nr" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.835393 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.851194 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.854416 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/97356cb4-53ea-4094-a7ac-28cde046b53c-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-gt8l7\" (UID: \"97356cb4-53ea-4094-a7ac-28cde046b53c\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gt8l7" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.871285 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.886024 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:19 crc kubenswrapper[4791]: E1208 21:19:19.887855 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:20.387832812 +0000 UTC m=+37.086591157 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.890414 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.904560 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/527bb8ce-24f8-4bcf-a100-457e11dac79d-config-volume\") pod \"collect-profiles-29420475-jq6vt\" (UID: \"527bb8ce-24f8-4bcf-a100-457e11dac79d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420475-jq6vt" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.916325 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7gsq" event={"ID":"c705ea37-240a-4fd8-9779-98bff52678ca","Type":"ContainerStarted","Data":"9dad24b992be360e45b518706e2c4136d87b2242d2c3a3327176c9ceaae98250"} Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.916379 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7gsq" event={"ID":"c705ea37-240a-4fd8-9779-98bff52678ca","Type":"ContainerStarted","Data":"6eb4c55eaf374e6d37946b209214939753e37410a64dddb7119e1e4d5892e963"} Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.917630 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7gsq" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.918152 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.923847 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gj9w7" event={"ID":"3804c897-c904-44f5-b8a3-04ead3e93ac4","Type":"ContainerStarted","Data":"6c6aae4b3aa1ef5ee5ddcad3203e3ad0f91996b07c99ba05ca10ae8222f44452"} Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.925300 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-dmbnh" event={"ID":"d5c2d385-390f-4d6f-8243-6399bd793167","Type":"ContainerStarted","Data":"cea5d9122caf2f24aa08d5e0cc61be3b7851f433bd435cd8f76189d638d75cd8"} Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.925331 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-dmbnh" event={"ID":"d5c2d385-390f-4d6f-8243-6399bd793167","Type":"ContainerStarted","Data":"238e556ae13cc3215df05a518172237dae847d2b9da44d2218caa1f730b871bb"} Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.926102 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-dmbnh" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.927581 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-d99lw" event={"ID":"116929aa-dcdc-4e62-9a30-40ef84d80f4f","Type":"ContainerStarted","Data":"c8329fd63fef254fa4ca250919be349debbc8087deaa0806ed8c7b8a45562d99"} Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.927601 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-d99lw" event={"ID":"116929aa-dcdc-4e62-9a30-40ef84d80f4f","Type":"ContainerStarted","Data":"2c270aa1de4294dabfe6c22c857f126a72b35dbf797991953bfd906c3ae7a78a"} Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.932297 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.933060 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qf5jp" event={"ID":"36861fd8-1693-4ac3-b70d-571c891ddca3","Type":"ContainerStarted","Data":"79309eac5cd22f7d217c161d644802dec0e1639651556f29a4361eba007a97e2"} Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.936760 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-cbzwp" event={"ID":"e273b49d-255d-434d-935a-38ba1a53c69a","Type":"ContainerStarted","Data":"77a654df99a2005459f576bafb671884dd63316d20305ec29ed10dbe0553916d"} Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.939102 4791 generic.go:334] "Generic (PLEG): container finished" podID="8c0d7a26-0742-4593-9192-c667b71c30fb" containerID="a5b8e327a429012d9ffa32f479ae7601dc8b9ebbd5ec1b4422b783112152fad4" exitCode=0 Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.939141 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-d259k" event={"ID":"8c0d7a26-0742-4593-9192-c667b71c30fb","Type":"ContainerDied","Data":"a5b8e327a429012d9ffa32f479ae7601dc8b9ebbd5ec1b4422b783112152fad4"} Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.939165 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-d259k" event={"ID":"8c0d7a26-0742-4593-9192-c667b71c30fb","Type":"ContainerStarted","Data":"36db916ff7726923a8139fc2778ae28d9b2817b9a8089fca90fcbd49c3a83a80"} Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.951918 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.954571 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-kn6mg"] Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.955373 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-vck6k"] Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.972107 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.987493 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: 
\"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:19 crc kubenswrapper[4791]: E1208 21:19:19.987650 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:20.487616406 +0000 UTC m=+37.186374761 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:19 crc kubenswrapper[4791]: I1208 21:19:19.988701 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:19 crc kubenswrapper[4791]: E1208 21:19:19.993839 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:20.493575326 +0000 UTC m=+37.192333671 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:19.999362 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-7c86k"] Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.001530 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.011261 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.014011 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6b194866-8f4f-4a5c-bf04-117a87fd1836-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-7rbn4\" (UID: \"6b194866-8f4f-4a5c-bf04-117a87fd1836\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7rbn4" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.025556 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6b194866-8f4f-4a5c-bf04-117a87fd1836-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-7rbn4\" (UID: \"6b194866-8f4f-4a5c-bf04-117a87fd1836\") " 
pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7rbn4" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.030702 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.050770 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 08 21:19:20 crc kubenswrapper[4791]: W1208 21:19:20.062635 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod10e8b274_a7ca_4b48_b5f5_7345a78cd074.slice/crio-2ead02c87b0f0ffa05b18cad08ab0c98bf67516e059f71722d7de0f1d69c9bd9 WatchSource:0}: Error finding container 2ead02c87b0f0ffa05b18cad08ab0c98bf67516e059f71722d7de0f1d69c9bd9: Status 404 returned error can't find the container with id 2ead02c87b0f0ffa05b18cad08ab0c98bf67516e059f71722d7de0f1d69c9bd9 Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.070387 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.077227 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8eec1570-5f40-4b0d-9a0f-04e381eac889-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-d6h2c\" (UID: \"8eec1570-5f40-4b0d-9a0f-04e381eac889\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-d6h2c" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.089766 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:20 crc kubenswrapper[4791]: E1208 21:19:20.090435 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:20.590382009 +0000 UTC m=+37.289140494 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.091053 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.091201 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:20 crc kubenswrapper[4791]: E1208 21:19:20.091614 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:20.591591508 +0000 UTC m=+37.290349853 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.111611 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.125409 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8eec1570-5f40-4b0d-9a0f-04e381eac889-config\") pod \"kube-controller-manager-operator-78b949d7b-d6h2c\" (UID: \"8eec1570-5f40-4b0d-9a0f-04e381eac889\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-d6h2c" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.131626 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.150581 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.150874 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-dmbnh" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.160894 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7e41f56a-953a-4454-863e-566cf339ab08-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-ktmqq\" (UID: \"7e41f56a-953a-4454-863e-566cf339ab08\") " 
pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ktmqq" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.173846 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.193153 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.193266 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:20 crc kubenswrapper[4791]: E1208 21:19:20.194207 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:20.694181477 +0000 UTC m=+37.392939822 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.194804 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:20 crc kubenswrapper[4791]: E1208 21:19:20.195730 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:20.695690383 +0000 UTC m=+37.394448728 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.201029 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e41f56a-953a-4454-863e-566cf339ab08-config\") pod \"openshift-apiserver-operator-796bbdcf4f-ktmqq\" (UID: \"7e41f56a-953a-4454-863e-566cf339ab08\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ktmqq" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.213003 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.232232 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.250740 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-sysctl-allowlist" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.259206 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/848a44d0-a0f4-48c2-9e4a-f0a4d3329815-cni-sysctl-allowlist\") pod \"cni-sysctl-allowlist-ds-2kt8c\" (UID: \"848a44d0-a0f4-48c2-9e4a-f0a4d3329815\") " pod="openshift-multus/cni-sysctl-allowlist-ds-2kt8c" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.271327 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.290675 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.296859 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:20 crc kubenswrapper[4791]: E1208 21:19:20.297001 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:20.796973931 +0000 UTC m=+37.495732276 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.297142 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:20 crc kubenswrapper[4791]: E1208 21:19:20.297486 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:20.797477983 +0000 UTC m=+37.496236328 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.298547 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/565d3927-29fc-4e86-8c1b-552a14386bc0-metrics-tls\") pod \"dns-default-7f84n\" (UID: \"565d3927-29fc-4e86-8c1b-552a14386bc0\") " pod="openshift-dns/dns-default-7f84n" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.312000 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.312817 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/565d3927-29fc-4e86-8c1b-552a14386bc0-config-volume\") pod \"dns-default-7f84n\" (UID: \"565d3927-29fc-4e86-8c1b-552a14386bc0\") " pod="openshift-dns/dns-default-7f84n" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.331782 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.351418 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.363525 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/839bdb0e-d6c5-4464-8e23-ee63845cf40f-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-68nlk\" (UID: \"839bdb0e-d6c5-4464-8e23-ee63845cf40f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-68nlk" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.371399 4791 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.393936 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.398929 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:20 crc kubenswrapper[4791]: E1208 21:19:20.399434 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:20.899401897 +0000 UTC m=+37.598160242 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.399622 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:20 crc kubenswrapper[4791]: E1208 21:19:20.400104 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:20.900087503 +0000 UTC m=+37.598845848 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.459762 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.463901 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7gsq" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.464058 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.464110 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.470688 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.471325 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ec69aeaf-ae67-4d6f-8342-4d3e84db8f63-metrics-tls\") pod \"dns-operator-744455d44c-rdpmz\" (UID: \"ec69aeaf-ae67-4d6f-8342-4d3e84db8f63\") " pod="openshift-dns-operator/dns-operator-744455d44c-rdpmz" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.475865 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/665b13a1-0c0e-4249-b7ec-f862eaeb3aea-etcd-client\") pod \"etcd-operator-b45778765-6fncw\" (UID: \"665b13a1-0c0e-4249-b7ec-f862eaeb3aea\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6fncw" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.490817 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.501222 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.501373 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/665b13a1-0c0e-4249-b7ec-f862eaeb3aea-serving-cert\") pod \"etcd-operator-b45778765-6fncw\" (UID: \"665b13a1-0c0e-4249-b7ec-f862eaeb3aea\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6fncw" Dec 08 21:19:20 crc kubenswrapper[4791]: E1208 21:19:20.501539 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:21.001526415 +0000 UTC m=+37.700284760 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.501642 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:20 crc kubenswrapper[4791]: E1208 21:19:20.502022 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:21.002014177 +0000 UTC m=+37.700772512 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.510744 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.518996 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/665b13a1-0c0e-4249-b7ec-f862eaeb3aea-config\") pod \"etcd-operator-b45778765-6fncw\" (UID: \"665b13a1-0c0e-4249-b7ec-f862eaeb3aea\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6fncw" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.534653 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.546131 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/665b13a1-0c0e-4249-b7ec-f862eaeb3aea-etcd-ca\") pod \"etcd-operator-b45778765-6fncw\" (UID: \"665b13a1-0c0e-4249-b7ec-f862eaeb3aea\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6fncw" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.551312 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.558454 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/665b13a1-0c0e-4249-b7ec-f862eaeb3aea-etcd-service-ca\") pod \"etcd-operator-b45778765-6fncw\" (UID: \"665b13a1-0c0e-4249-b7ec-f862eaeb3aea\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6fncw" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.571587 4791 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.604319 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:20 crc kubenswrapper[4791]: E1208 21:19:20.605472 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:21.105412665 +0000 UTC m=+37.804171010 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.610371 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/974635f6-7864-44fb-81df-ec9f404ea543-bound-sa-token\") pod \"ingress-operator-5b745b69d9-sl8dd\" (UID: \"974635f6-7864-44fb-81df-ec9f404ea543\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sl8dd" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.630499 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-swbcq\" (UniqueName: \"kubernetes.io/projected/974635f6-7864-44fb-81df-ec9f404ea543-kube-api-access-swbcq\") pod \"ingress-operator-5b745b69d9-sl8dd\" (UID: \"974635f6-7864-44fb-81df-ec9f404ea543\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sl8dd" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.646625 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lnrth\" (UniqueName: \"kubernetes.io/projected/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-kube-api-access-lnrth\") pod \"oauth-openshift-558db77b4-zll8d\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.652974 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.672659 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.691478 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.697399 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.702523 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7f42a609-f815-4f83-b09b-cf94f54e6581-cert\") pod \"ingress-canary-xpmb4\" (UID: \"7f42a609-f815-4f83-b09b-cf94f54e6581\") " pod="openshift-ingress-canary/ingress-canary-xpmb4" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.706389 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sl8dd" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.707475 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:20 crc kubenswrapper[4791]: E1208 21:19:20.707915 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:21.207896752 +0000 UTC m=+37.906655097 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.711541 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.731895 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.754798 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.773395 4791 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 08 21:19:20 crc kubenswrapper[4791]: E1208 21:19:20.774448 4791 secret.go:188] Couldn't get secret openshift-machine-config-operator/machine-config-server-tls: failed to sync secret cache: timed out waiting for the condition Dec 08 21:19:20 crc kubenswrapper[4791]: E1208 21:19:20.774550 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fb7a4850-1fbe-4d61-8594-7527ef7b28b9-certs podName:fb7a4850-1fbe-4d61-8594-7527ef7b28b9 nodeName:}" failed. No retries permitted until 2025-12-08 21:19:21.274524087 +0000 UTC m=+37.973282432 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "certs" (UniqueName: "kubernetes.io/secret/fb7a4850-1fbe-4d61-8594-7527ef7b28b9-certs") pod "machine-config-server-t5m7x" (UID: "fb7a4850-1fbe-4d61-8594-7527ef7b28b9") : failed to sync secret cache: timed out waiting for the condition Dec 08 21:19:20 crc kubenswrapper[4791]: E1208 21:19:20.779630 4791 secret.go:188] Couldn't get secret openshift-machine-config-operator/node-bootstrapper-token: failed to sync secret cache: timed out waiting for the condition Dec 08 21:19:20 crc kubenswrapper[4791]: E1208 21:19:20.779779 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fb7a4850-1fbe-4d61-8594-7527ef7b28b9-node-bootstrap-token podName:fb7a4850-1fbe-4d61-8594-7527ef7b28b9 nodeName:}" failed. No retries permitted until 2025-12-08 21:19:21.279745599 +0000 UTC m=+37.978503944 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "node-bootstrap-token" (UniqueName: "kubernetes.io/secret/fb7a4850-1fbe-4d61-8594-7527ef7b28b9-node-bootstrap-token") pod "machine-config-server-t5m7x" (UID: "fb7a4850-1fbe-4d61-8594-7527ef7b28b9") : failed to sync secret cache: timed out waiting for the condition Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.791012 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.808272 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.808881 4791 request.go:700] Waited for 1.923241588s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/secrets?fieldSelector=metadata.name%3Dmachine-config-server-tls&limit=500&resourceVersion=0 Dec 08 21:19:20 crc kubenswrapper[4791]: E1208 21:19:20.808980 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:21.308941465 +0000 UTC m=+38.007699810 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.811211 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.834232 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.854670 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.872294 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.890552 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.910924 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:20 crc kubenswrapper[4791]: E1208 21:19:20.911284 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:21.411269348 +0000 UTC m=+38.110027693 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.911688 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.934538 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.949020 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"097e5d40b65bb413cd4b3e50e7fdc2f643bc3f690b6bc021babf3009911df81b"} Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.954422 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-7c86k" event={"ID":"10e8b274-a7ca-4b48-b5f5-7345a78cd074","Type":"ContainerStarted","Data":"7f45903acaea51cac5f5b856c1bb8451e57431cda29a0ceb9b7536527b64b91f"} Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.954489 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-7c86k" event={"ID":"10e8b274-a7ca-4b48-b5f5-7345a78cd074","Type":"ContainerStarted","Data":"7e123e7914dccf8cc5f1d9f62da64d37aa8dec4c2920521c5c68e907e7d3e1aa"} Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.954510 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-7c86k" event={"ID":"10e8b274-a7ca-4b48-b5f5-7345a78cd074","Type":"ContainerStarted","Data":"2ead02c87b0f0ffa05b18cad08ab0c98bf67516e059f71722d7de0f1d69c9bd9"} Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.958512 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.961355 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-sl8dd"] Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.964061 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qf5jp" event={"ID":"36861fd8-1693-4ac3-b70d-571c891ddca3","Type":"ContainerStarted","Data":"635fa9876fe2a518c12e40e26ba9e36c11de932de0b759f4f9ea52b452336f34"} Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.964098 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qf5jp" event={"ID":"36861fd8-1693-4ac3-b70d-571c891ddca3","Type":"ContainerStarted","Data":"857fb4cba3e9f7dfb6fb037d667457e772b54dd3f56180e8391d9555feb83329"} Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.966106 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4hfxg" 
event={"ID":"c68fa880-a66b-4c4f-9975-c66ac4ae4767","Type":"ContainerStarted","Data":"bea6c8031e6ae632c8bf910a4c4e85ecfa110dde1fb01f79fd53abbaca1a37d7"} Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.966133 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4hfxg" event={"ID":"c68fa880-a66b-4c4f-9975-c66ac4ae4767","Type":"ContainerStarted","Data":"c42f59bb386467633af07cb9ec4ca9822adb3366e109a4f95e5f2e4150d5dc76"} Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.966144 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4hfxg" event={"ID":"c68fa880-a66b-4c4f-9975-c66ac4ae4767","Type":"ContainerStarted","Data":"dfa78ea0c9f5355b6c292036f57d974a778e58c2526488807cc1b746ae38c22b"} Dec 08 21:19:20 crc kubenswrapper[4791]: I1208 21:19:20.977185 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.008725 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-kn6mg" event={"ID":"8bd7cdf9-9085-4702-8a95-f3f445783066","Type":"ContainerStarted","Data":"780a43dcc765947bfc4191796a7229566d6781e2f3ad9a95182e9d43ea9817a3"} Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.008770 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-kn6mg" event={"ID":"8bd7cdf9-9085-4702-8a95-f3f445783066","Type":"ContainerStarted","Data":"98355b5ff0f37f5f69085547fb09cf3959e6d3aadb97e4f22ccde4fc9bcbc428"} Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.009839 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-kn6mg" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.011527 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:21 crc kubenswrapper[4791]: E1208 21:19:21.012916 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:21.512901245 +0000 UTC m=+38.211659590 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.032206 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-vck6k" event={"ID":"959a8eb5-7d1a-4e10-bfc4-c23a1223d38d","Type":"ContainerStarted","Data":"c8b446ed2b6ade0eca7b307baa94aa89649601ec5d3fe63477a0276f8f3e13db"} Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.032264 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-vck6k" event={"ID":"959a8eb5-7d1a-4e10-bfc4-c23a1223d38d","Type":"ContainerStarted","Data":"b9d58ec7616f582daeecfebd87069bbd98c4bad9f8a360421e0ca724d933955e"} Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.034057 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4jxjd\" (UniqueName: \"kubernetes.io/projected/409596b6-1fa0-416d-b5a3-a06c2e36c15b-kube-api-access-4jxjd\") pod \"marketplace-operator-79b997595-w9zwt\" (UID: \"409596b6-1fa0-416d-b5a3-a06c2e36c15b\") " pod="openshift-marketplace/marketplace-operator-79b997595-w9zwt" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.038005 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8sjd\" (UniqueName: \"kubernetes.io/projected/91289879-25be-460a-9639-cddfd77cd942-kube-api-access-z8sjd\") pod \"service-ca-operator-777779d784-8hq8w\" (UID: \"91289879-25be-460a-9639-cddfd77cd942\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-8hq8w" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.043735 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-kn6mg" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.059907 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-zll8d"] Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.064070 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-cbzwp" event={"ID":"e273b49d-255d-434d-935a-38ba1a53c69a","Type":"ContainerDied","Data":"88130081ffe78f58c7d9442c13211c00f28ff9ae827a85dd052b8649c8f6c835"} Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.064029 4791 generic.go:334] "Generic (PLEG): container finished" podID="e273b49d-255d-434d-935a-38ba1a53c69a" containerID="88130081ffe78f58c7d9442c13211c00f28ff9ae827a85dd052b8649c8f6c835" exitCode=0 Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.075251 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z7278\" (UniqueName: \"kubernetes.io/projected/097c1e37-dbfb-4e31-9c4d-561c6bed9933-kube-api-access-z7278\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.083760 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s6zgk\" (UniqueName: 
\"kubernetes.io/projected/3857a937-d62e-4f66-b53c-7e466a6d5bff-kube-api-access-s6zgk\") pod \"authentication-operator-69f744f599-p7dcz\" (UID: \"3857a937-d62e-4f66-b53c-7e466a6d5bff\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-p7dcz" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.088084 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-d259k" event={"ID":"8c0d7a26-0742-4593-9192-c667b71c30fb","Type":"ContainerStarted","Data":"5a2a0399e45b5d7ec8091ea81d91e416085f36905bbd3f195923525d139647b7"} Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.088331 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-d259k" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.094112 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/097c1e37-dbfb-4e31-9c4d-561c6bed9933-bound-sa-token\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.105950 4791 generic.go:334] "Generic (PLEG): container finished" podID="3804c897-c904-44f5-b8a3-04ead3e93ac4" containerID="5a1055177b00fbf7919c1dbd50172b16393f0d24473edbfa4409419367225f86" exitCode=0 Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.106287 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gj9w7" event={"ID":"3804c897-c904-44f5-b8a3-04ead3e93ac4","Type":"ContainerDied","Data":"5a1055177b00fbf7919c1dbd50172b16393f0d24473edbfa4409419367225f86"} Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.114337 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:21 crc kubenswrapper[4791]: E1208 21:19:21.115720 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:21.615687879 +0000 UTC m=+38.314446224 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.131517 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8mwb2\" (UniqueName: \"kubernetes.io/projected/839bdb0e-d6c5-4464-8e23-ee63845cf40f-kube-api-access-8mwb2\") pod \"control-plane-machine-set-operator-78cbb6b69f-68nlk\" (UID: \"839bdb0e-d6c5-4464-8e23-ee63845cf40f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-68nlk" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.156848 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5mxlf\" (UniqueName: \"kubernetes.io/projected/7f42a609-f815-4f83-b09b-cf94f54e6581-kube-api-access-5mxlf\") pod \"ingress-canary-xpmb4\" (UID: \"7f42a609-f815-4f83-b09b-cf94f54e6581\") " pod="openshift-ingress-canary/ingress-canary-xpmb4" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.179536 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vbxf8\" (UniqueName: \"kubernetes.io/projected/7d3897ba-b8f0-4eea-948f-74af9801350b-kube-api-access-vbxf8\") pod \"package-server-manager-789f6589d5-7dk77\" (UID: \"7d3897ba-b8f0-4eea-948f-74af9801350b\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7dk77" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.202763 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jmfc9\" (UniqueName: \"kubernetes.io/projected/0de53f0b-4ad4-43c0-bc9b-cfbbd5cc0c2c-kube-api-access-jmfc9\") pod \"service-ca-9c57cc56f-5tc82\" (UID: \"0de53f0b-4ad4-43c0-bc9b-cfbbd5cc0c2c\") " pod="openshift-service-ca/service-ca-9c57cc56f-5tc82" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.207054 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tgnn4\" (UniqueName: \"kubernetes.io/projected/ec69aeaf-ae67-4d6f-8342-4d3e84db8f63-kube-api-access-tgnn4\") pod \"dns-operator-744455d44c-rdpmz\" (UID: \"ec69aeaf-ae67-4d6f-8342-4d3e84db8f63\") " pod="openshift-dns-operator/dns-operator-744455d44c-rdpmz" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.216248 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:21 crc kubenswrapper[4791]: E1208 21:19:21.218103 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:21.718078794 +0000 UTC m=+38.416837139 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.227633 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-skbjt\" (UniqueName: \"kubernetes.io/projected/001d07d1-9cd0-4c97-bb65-29457e205813-kube-api-access-skbjt\") pod \"machine-config-operator-74547568cd-pm6zw\" (UID: \"001d07d1-9cd0-4c97-bb65-29457e205813\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pm6zw" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.248260 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v6m8l\" (UniqueName: \"kubernetes.io/projected/7e41f56a-953a-4454-863e-566cf339ab08-kube-api-access-v6m8l\") pod \"openshift-apiserver-operator-796bbdcf4f-ktmqq\" (UID: \"7e41f56a-953a-4454-863e-566cf339ab08\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ktmqq" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.258848 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ktmqq" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.265077 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e1c4e40d-0ac4-4a7f-9803-4be4a15fdbf7-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-q8wd6\" (UID: \"e1c4e40d-0ac4-4a7f-9803-4be4a15fdbf7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-q8wd6" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.288295 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-p7dcz" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.295418 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hxqkf\" (UniqueName: \"kubernetes.io/projected/4a64d896-f396-4347-9e7a-091e9741b884-kube-api-access-hxqkf\") pod \"router-default-5444994796-hs66g\" (UID: \"4a64d896-f396-4347-9e7a-091e9741b884\") " pod="openshift-ingress/router-default-5444994796-hs66g" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.299941 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-68nlk" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.307988 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xdrk4\" (UniqueName: \"kubernetes.io/projected/12b03a53-7257-43c2-98b5-4fba9fa582cb-kube-api-access-xdrk4\") pod \"migrator-59844c95c7-jxmvh\" (UID: \"12b03a53-7257-43c2-98b5-4fba9fa582cb\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-jxmvh" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.310261 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-rdpmz" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.312688 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-w9zwt" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.331487 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f7lf5\" (UniqueName: \"kubernetes.io/projected/e9bbe11b-6bc7-4bb3-89cc-178d572202e1-kube-api-access-f7lf5\") pod \"machine-config-controller-84d6567774-rdmr7\" (UID: \"e9bbe11b-6bc7-4bb3-89cc-178d572202e1\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rdmr7" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.332098 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-8hq8w" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.334069 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-xpmb4" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.334582 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/fb7a4850-1fbe-4d61-8594-7527ef7b28b9-certs\") pod \"machine-config-server-t5m7x\" (UID: \"fb7a4850-1fbe-4d61-8594-7527ef7b28b9\") " pod="openshift-machine-config-operator/machine-config-server-t5m7x" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.334771 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.334835 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/fb7a4850-1fbe-4d61-8594-7527ef7b28b9-node-bootstrap-token\") pod \"machine-config-server-t5m7x\" (UID: \"fb7a4850-1fbe-4d61-8594-7527ef7b28b9\") " pod="openshift-machine-config-operator/machine-config-server-t5m7x" Dec 08 21:19:21 crc kubenswrapper[4791]: E1208 21:19:21.338011 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:21.83798554 +0000 UTC m=+38.536743885 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.340235 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/fb7a4850-1fbe-4d61-8594-7527ef7b28b9-node-bootstrap-token\") pod \"machine-config-server-t5m7x\" (UID: \"fb7a4850-1fbe-4d61-8594-7527ef7b28b9\") " pod="openshift-machine-config-operator/machine-config-server-t5m7x" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.342305 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/fb7a4850-1fbe-4d61-8594-7527ef7b28b9-certs\") pod \"machine-config-server-t5m7x\" (UID: \"fb7a4850-1fbe-4d61-8594-7527ef7b28b9\") " pod="openshift-machine-config-operator/machine-config-server-t5m7x" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.347789 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rdmr7" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.362174 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pm6zw" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.365950 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-5tc82" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.372868 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8tnhh\" (UniqueName: \"kubernetes.io/projected/74d9b7de-2712-4320-b41a-1e5c91bd36e7-kube-api-access-8tnhh\") pod \"downloads-7954f5f757-wl6d7\" (UID: \"74d9b7de-2712-4320-b41a-1e5c91bd36e7\") " pod="openshift-console/downloads-7954f5f757-wl6d7" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.379118 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-wl6d7" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.384881 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-hs66g" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.391520 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-khl4h\" (UniqueName: \"kubernetes.io/projected/848a44d0-a0f4-48c2-9e4a-f0a4d3329815-kube-api-access-khl4h\") pod \"cni-sysctl-allowlist-ds-2kt8c\" (UID: \"848a44d0-a0f4-48c2-9e4a-f0a4d3329815\") " pod="openshift-multus/cni-sysctl-allowlist-ds-2kt8c" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.395332 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7sbfk\" (UniqueName: \"kubernetes.io/projected/fb7a4850-1fbe-4d61-8594-7527ef7b28b9-kube-api-access-7sbfk\") pod \"machine-config-server-t5m7x\" (UID: \"fb7a4850-1fbe-4d61-8594-7527ef7b28b9\") " pod="openshift-machine-config-operator/machine-config-server-t5m7x" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.405653 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7dk77" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.421580 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-q8wd6" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.435966 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:21 crc kubenswrapper[4791]: E1208 21:19:21.436282 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:21.936263018 +0000 UTC m=+38.635021363 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.436201 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cjklc\" (UniqueName: \"kubernetes.io/projected/e1b46aaa-47dd-462d-835a-e688e19f4fca-kube-api-access-cjklc\") pod \"olm-operator-6b444d44fb-rvvfm\" (UID: \"e1b46aaa-47dd-462d-835a-e688e19f4fca\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rvvfm" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.436580 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:21 crc kubenswrapper[4791]: E1208 21:19:21.437116 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:21.937096788 +0000 UTC m=+38.635855133 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.442366 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dtmnm\" (UniqueName: \"kubernetes.io/projected/ee523989-5472-461e-8673-c5e80f5216e1-kube-api-access-dtmnm\") pod \"multus-admission-controller-857f4d67dd-sh5nr\" (UID: \"ee523989-5472-461e-8673-c5e80f5216e1\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-sh5nr" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.444131 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rvvfm" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.473794 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z4q88\" (UniqueName: \"kubernetes.io/projected/549eb9c0-fe38-43ee-a589-daa378a20d48-kube-api-access-z4q88\") pod \"catalog-operator-68c6474976-zp8m4\" (UID: \"549eb9c0-fe38-43ee-a589-daa378a20d48\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zp8m4" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.502156 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k2lkn\" (UniqueName: \"kubernetes.io/projected/8256b973-cb50-4848-aabf-109537321b94-kube-api-access-k2lkn\") pod \"csi-hostpathplugin-rnxcp\" (UID: \"8256b973-cb50-4848-aabf-109537321b94\") " pod="hostpath-provisioner/csi-hostpathplugin-rnxcp" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.529202 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-jxmvh" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.530376 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4blm5\" (UniqueName: \"kubernetes.io/projected/6b194866-8f4f-4a5c-bf04-117a87fd1836-kube-api-access-4blm5\") pod \"kube-storage-version-migrator-operator-b67b599dd-7rbn4\" (UID: \"6b194866-8f4f-4a5c-bf04-117a87fd1836\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7rbn4" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.555287 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7rbn4" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.555457 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:21 crc kubenswrapper[4791]: E1208 21:19:21.556335 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:22.056304137 +0000 UTC m=+38.755062482 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.559623 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x2xbl\" (UniqueName: \"kubernetes.io/projected/97356cb4-53ea-4094-a7ac-28cde046b53c-kube-api-access-x2xbl\") pod \"openshift-controller-manager-operator-756b6f6bc6-gt8l7\" (UID: \"97356cb4-53ea-4094-a7ac-28cde046b53c\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gt8l7" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.574023 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/cni-sysctl-allowlist-ds-2kt8c" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.585634 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kdgkn\" (UniqueName: \"kubernetes.io/projected/665b13a1-0c0e-4249-b7ec-f862eaeb3aea-kube-api-access-kdgkn\") pod \"etcd-operator-b45778765-6fncw\" (UID: \"665b13a1-0c0e-4249-b7ec-f862eaeb3aea\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6fncw" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.597489 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8eec1570-5f40-4b0d-9a0f-04e381eac889-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-d6h2c\" (UID: \"8eec1570-5f40-4b0d-9a0f-04e381eac889\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-d6h2c" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.619088 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x24cp\" (UniqueName: \"kubernetes.io/projected/1789d3a9-3a3a-421c-ad72-cb08f45afa70-kube-api-access-x24cp\") pod \"packageserver-d55dfcdfc-gnc4f\" (UID: \"1789d3a9-3a3a-421c-ad72-cb08f45afa70\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gnc4f" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.627510 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-6fncw" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.634971 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cd3251b3-7787-4b6f-bcad-dc0738414547-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-4f4sg\" (UID: \"cd3251b3-7787-4b6f-bcad-dc0738414547\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4f4sg" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.641107 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4f4sg" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.645500 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vfxbk\" (UniqueName: \"kubernetes.io/projected/527bb8ce-24f8-4bcf-a100-457e11dac79d-kube-api-access-vfxbk\") pod \"collect-profiles-29420475-jq6vt\" (UID: \"527bb8ce-24f8-4bcf-a100-457e11dac79d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420475-jq6vt" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.654735 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-rnxcp" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.658551 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:21 crc kubenswrapper[4791]: E1208 21:19:21.659594 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:22.159574453 +0000 UTC m=+38.858332818 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.664013 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-t5m7x" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.672653 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dqr94\" (UniqueName: \"kubernetes.io/projected/565d3927-29fc-4e86-8c1b-552a14386bc0-kube-api-access-dqr94\") pod \"dns-default-7f84n\" (UID: \"565d3927-29fc-4e86-8c1b-552a14386bc0\") " pod="openshift-dns/dns-default-7f84n" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.693614 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zp8m4" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.712926 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gnc4f" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.736270 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-sh5nr" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.760170 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:21 crc kubenswrapper[4791]: E1208 21:19:21.760272 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:22.260255297 +0000 UTC m=+38.959013642 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.760490 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:21 crc kubenswrapper[4791]: E1208 21:19:21.760821 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:22.260811351 +0000 UTC m=+38.959569696 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.766438 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-p7dcz"] Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.767033 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gt8l7" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.801157 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420475-jq6vt" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.848243 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-d6h2c" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.861598 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.861949 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/295d8719-2569-4b02-a1a7-3dd2a2b119a8-metrics-certs\") pod \"network-metrics-daemon-2nxp5\" (UID: \"295d8719-2569-4b02-a1a7-3dd2a2b119a8\") " pod="openshift-multus/network-metrics-daemon-2nxp5" Dec 08 21:19:21 crc kubenswrapper[4791]: E1208 21:19:21.862992 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:22.362944009 +0000 UTC m=+39.061702374 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.881284 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/295d8719-2569-4b02-a1a7-3dd2a2b119a8-metrics-certs\") pod \"network-metrics-daemon-2nxp5\" (UID: \"295d8719-2569-4b02-a1a7-3dd2a2b119a8\") " pod="openshift-multus/network-metrics-daemon-2nxp5" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.889963 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-7f84n" Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.963668 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:21 crc kubenswrapper[4791]: E1208 21:19:21.964135 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:22.464122765 +0000 UTC m=+39.162881110 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:21 crc kubenswrapper[4791]: I1208 21:19:21.994004 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qf5jp" podStartSLOduration=19.993978096 podStartE2EDuration="19.993978096s" podCreationTimestamp="2025-12-08 21:19:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:21.992386199 +0000 UTC m=+38.691144544" watchObservedRunningTime="2025-12-08 21:19:21.993978096 +0000 UTC m=+38.692736442" Dec 08 21:19:22 crc kubenswrapper[4791]: I1208 21:19:22.042177 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2nxp5" Dec 08 21:19:22 crc kubenswrapper[4791]: I1208 21:19:22.064526 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:22 crc kubenswrapper[4791]: E1208 21:19:22.065345 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:22.565322602 +0000 UTC m=+39.264080947 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:22 crc kubenswrapper[4791]: I1208 21:19:22.069599 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7gsq" podStartSLOduration=19.069579752 podStartE2EDuration="19.069579752s" podCreationTimestamp="2025-12-08 21:19:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:22.032055951 +0000 UTC m=+38.730814296" watchObservedRunningTime="2025-12-08 21:19:22.069579752 +0000 UTC m=+38.768338097" Dec 08 21:19:22 crc kubenswrapper[4791]: W1208 21:19:22.087664 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3857a937_d62e_4f66_b53c_7e466a6d5bff.slice/crio-b59e50131e342b1b8986343ce522122e729bd1d3bb9067a447dd8cd657c983eb WatchSource:0}: Error finding container b59e50131e342b1b8986343ce522122e729bd1d3bb9067a447dd8cd657c983eb: Status 404 returned error can't find the container with id b59e50131e342b1b8986343ce522122e729bd1d3bb9067a447dd8cd657c983eb Dec 08 21:19:22 crc kubenswrapper[4791]: I1208 21:19:22.152293 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sl8dd" event={"ID":"974635f6-7864-44fb-81df-ec9f404ea543","Type":"ContainerStarted","Data":"4bd4bcba4b9eb4537dd4f3d4629ca6eb7ac18bec4f88e6bc05a8f2f4966b0317"} Dec 08 21:19:22 crc kubenswrapper[4791]: I1208 21:19:22.152343 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sl8dd" event={"ID":"974635f6-7864-44fb-81df-ec9f404ea543","Type":"ContainerStarted","Data":"2e1e81b3f1ef8d2194e6223bd1b3ec2fd64580c3d09c74ff9a2fc2867ba0d595"} Dec 08 21:19:22 crc kubenswrapper[4791]: I1208 21:19:22.152370 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sl8dd" event={"ID":"974635f6-7864-44fb-81df-ec9f404ea543","Type":"ContainerStarted","Data":"14a4d065476029b3268fe8e7ab538573494e0ab17811fdcba20b12de0a8cd9e6"} Dec 08 21:19:22 crc kubenswrapper[4791]: I1208 21:19:22.154682 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-dmbnh" podStartSLOduration=20.15465497 podStartE2EDuration="20.15465497s" podCreationTimestamp="2025-12-08 21:19:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:22.152556521 +0000 UTC m=+38.851314876" watchObservedRunningTime="2025-12-08 21:19:22.15465497 +0000 UTC m=+38.853413315" Dec 08 21:19:22 crc kubenswrapper[4791]: I1208 21:19:22.165865 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: 
\"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:22 crc kubenswrapper[4791]: E1208 21:19:22.166162 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:22.66615122 +0000 UTC m=+39.364909565 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:22 crc kubenswrapper[4791]: I1208 21:19:22.195887 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-cbzwp" event={"ID":"e273b49d-255d-434d-935a-38ba1a53c69a","Type":"ContainerStarted","Data":"9860fc22b72868dd0fb5ec946b0759ae1330f84f26b3fd61c459d938c89ded56"} Dec 08 21:19:22 crc kubenswrapper[4791]: I1208 21:19:22.197002 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-hs66g" event={"ID":"4a64d896-f396-4347-9e7a-091e9741b884","Type":"ContainerStarted","Data":"603a7a1de26817b95c06640158ff123c6ffe05e26641dc8188c75d90a696ad23"} Dec 08 21:19:22 crc kubenswrapper[4791]: I1208 21:19:22.233207 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" event={"ID":"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9","Type":"ContainerStarted","Data":"72345cdd21b6de519335e1b13c5462fe60ece4ad7347310d0c93eada4c596d25"} Dec 08 21:19:22 crc kubenswrapper[4791]: I1208 21:19:22.233260 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" event={"ID":"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9","Type":"ContainerStarted","Data":"092b6f4799b7d04a5e3e2bb92ad8083044c658f3b3f26a76f330a9fa255f5cbb"} Dec 08 21:19:22 crc kubenswrapper[4791]: I1208 21:19:22.234206 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:19:22 crc kubenswrapper[4791]: I1208 21:19:22.264989 4791 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-zll8d container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.23:6443/healthz\": dial tcp 10.217.0.23:6443: connect: connection refused" start-of-body= Dec 08 21:19:22 crc kubenswrapper[4791]: I1208 21:19:22.265060 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" podUID="ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.23:6443/healthz\": dial tcp 10.217.0.23:6443: connect: connection refused" Dec 08 21:19:22 crc kubenswrapper[4791]: I1208 21:19:22.267377 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: 
\"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:22 crc kubenswrapper[4791]: E1208 21:19:22.268534 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:22.768508024 +0000 UTC m=+39.467266369 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:22 crc kubenswrapper[4791]: I1208 21:19:22.326091 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-p7dcz" event={"ID":"3857a937-d62e-4f66-b53c-7e466a6d5bff","Type":"ContainerStarted","Data":"b59e50131e342b1b8986343ce522122e729bd1d3bb9067a447dd8cd657c983eb"} Dec 08 21:19:22 crc kubenswrapper[4791]: I1208 21:19:22.342609 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-t5m7x" event={"ID":"fb7a4850-1fbe-4d61-8594-7527ef7b28b9","Type":"ContainerStarted","Data":"a3bf13d4a510d2f05a98335582257409c204e6bbd5679818eed529b400044745"} Dec 08 21:19:22 crc kubenswrapper[4791]: I1208 21:19:22.368848 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/cni-sysctl-allowlist-ds-2kt8c" event={"ID":"848a44d0-a0f4-48c2-9e4a-f0a4d3329815","Type":"ContainerStarted","Data":"4b64e78bcd4908bd1a813eb1d3bc6bff6c8a67fbfa59fcc4a60dbbdb07f6f166"} Dec 08 21:19:22 crc kubenswrapper[4791]: I1208 21:19:22.372453 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:22 crc kubenswrapper[4791]: E1208 21:19:22.375396 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:22.875382234 +0000 UTC m=+39.574140579 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:22 crc kubenswrapper[4791]: I1208 21:19:22.473081 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:22 crc kubenswrapper[4791]: E1208 21:19:22.479460 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:22.979434068 +0000 UTC m=+39.678192423 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:22 crc kubenswrapper[4791]: I1208 21:19:22.579104 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:22 crc kubenswrapper[4791]: E1208 21:19:22.579724 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:23.079679852 +0000 UTC m=+39.778438277 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:22 crc kubenswrapper[4791]: I1208 21:19:22.640321 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-kn6mg" podStartSLOduration=20.640304886 podStartE2EDuration="20.640304886s" podCreationTimestamp="2025-12-08 21:19:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:22.638962004 +0000 UTC m=+39.337720349" watchObservedRunningTime="2025-12-08 21:19:22.640304886 +0000 UTC m=+39.339063231" Dec 08 21:19:22 crc kubenswrapper[4791]: I1208 21:19:22.680198 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:22 crc kubenswrapper[4791]: E1208 21:19:22.680522 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:23.18050665 +0000 UTC m=+39.879264995 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:22 crc kubenswrapper[4791]: I1208 21:19:22.776694 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ktmqq"] Dec 08 21:19:22 crc kubenswrapper[4791]: I1208 21:19:22.781444 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:22 crc kubenswrapper[4791]: E1208 21:19:22.781806 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:23.281791579 +0000 UTC m=+39.980549924 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:22 crc kubenswrapper[4791]: I1208 21:19:22.803201 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-xpmb4"] Dec 08 21:19:22 crc kubenswrapper[4791]: I1208 21:19:22.883025 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:22 crc kubenswrapper[4791]: E1208 21:19:22.884154 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:23.38403143 +0000 UTC m=+40.082789775 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:22 crc kubenswrapper[4791]: I1208 21:19:22.986601 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:22 crc kubenswrapper[4791]: E1208 21:19:22.987116 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:23.487100041 +0000 UTC m=+40.185858386 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:23 crc kubenswrapper[4791]: I1208 21:19:23.082163 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-d259k" podStartSLOduration=21.082133463 podStartE2EDuration="21.082133463s" podCreationTimestamp="2025-12-08 21:19:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:23.053429468 +0000 UTC m=+39.752187833" watchObservedRunningTime="2025-12-08 21:19:23.082133463 +0000 UTC m=+39.780891808" Dec 08 21:19:23 crc kubenswrapper[4791]: I1208 21:19:23.088244 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:23 crc kubenswrapper[4791]: E1208 21:19:23.088398 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:23.588368889 +0000 UTC m=+40.287127234 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:23 crc kubenswrapper[4791]: I1208 21:19:23.088603 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:23 crc kubenswrapper[4791]: E1208 21:19:23.089257 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:23.58924752 +0000 UTC m=+40.288005865 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:23 crc kubenswrapper[4791]: I1208 21:19:23.104265 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-d99lw" podStartSLOduration=20.104237392 podStartE2EDuration="20.104237392s" podCreationTimestamp="2025-12-08 21:19:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:23.083165917 +0000 UTC m=+39.781924262" watchObservedRunningTime="2025-12-08 21:19:23.104237392 +0000 UTC m=+39.802995737" Dec 08 21:19:23 crc kubenswrapper[4791]: I1208 21:19:23.191969 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:23 crc kubenswrapper[4791]: E1208 21:19:23.194035 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:23.69400179 +0000 UTC m=+40.392760135 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:23 crc kubenswrapper[4791]: I1208 21:19:23.194902 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:23 crc kubenswrapper[4791]: E1208 21:19:23.195458 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:23.695432083 +0000 UTC m=+40.394190588 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:23 crc kubenswrapper[4791]: I1208 21:19:23.236857 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4hfxg" podStartSLOduration=21.236825845 podStartE2EDuration="21.236825845s" podCreationTimestamp="2025-12-08 21:19:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:23.230420784 +0000 UTC m=+39.929179129" watchObservedRunningTime="2025-12-08 21:19:23.236825845 +0000 UTC m=+39.935584190" Dec 08 21:19:23 crc kubenswrapper[4791]: I1208 21:19:23.297680 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:23 crc kubenswrapper[4791]: E1208 21:19:23.297813 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:23.797791516 +0000 UTC m=+40.496549861 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:23 crc kubenswrapper[4791]: I1208 21:19:23.298270 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:23 crc kubenswrapper[4791]: E1208 21:19:23.298640 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:23.798630306 +0000 UTC m=+40.497388651 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:23 crc kubenswrapper[4791]: I1208 21:19:23.401190 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:23 crc kubenswrapper[4791]: E1208 21:19:23.406015 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:23.905975217 +0000 UTC m=+40.604733562 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:23 crc kubenswrapper[4791]: I1208 21:19:23.408438 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:23 crc kubenswrapper[4791]: E1208 21:19:23.410379 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:23.91036529 +0000 UTC m=+40.609123635 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:23 crc kubenswrapper[4791]: I1208 21:19:23.520095 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:23 crc kubenswrapper[4791]: E1208 21:19:23.520443 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:24.020427145 +0000 UTC m=+40.719185490 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:23 crc kubenswrapper[4791]: I1208 21:19:23.537334 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/cni-sysctl-allowlist-ds-2kt8c" event={"ID":"848a44d0-a0f4-48c2-9e4a-f0a4d3329815","Type":"ContainerStarted","Data":"809ab81c5459ff31fc8a256a984e864b4ba0c14a542420796322cf4769ff1551"} Dec 08 21:19:23 crc kubenswrapper[4791]: I1208 21:19:23.538098 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-multus/cni-sysctl-allowlist-ds-2kt8c" Dec 08 21:19:23 crc kubenswrapper[4791]: I1208 21:19:23.548663 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-rdmr7"] Dec 08 21:19:23 crc kubenswrapper[4791]: W1208 21:19:23.617075 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podec69aeaf_ae67_4d6f_8342_4d3e84db8f63.slice/crio-f7e160245672566d83f7e4c03a08f775f42b679f65ed00640f38d6adf5b68523 WatchSource:0}: Error finding container f7e160245672566d83f7e4c03a08f775f42b679f65ed00640f38d6adf5b68523: Status 404 returned error can't find the container with id f7e160245672566d83f7e4c03a08f775f42b679f65ed00640f38d6adf5b68523 Dec 08 21:19:23 crc kubenswrapper[4791]: I1208 21:19:23.621551 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:23 crc kubenswrapper[4791]: E1208 21:19:23.621952 4791 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:24.121936249 +0000 UTC m=+40.820694594 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:23 crc kubenswrapper[4791]: W1208 21:19:23.630680 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod409596b6_1fa0_416d_b5a3_a06c2e36c15b.slice/crio-5aad25904232fdaaa877cc4f3b26c511e4dc7dfdcdc136eea6062a8250593b45 WatchSource:0}: Error finding container 5aad25904232fdaaa877cc4f3b26c511e4dc7dfdcdc136eea6062a8250593b45: Status 404 returned error can't find the container with id 5aad25904232fdaaa877cc4f3b26c511e4dc7dfdcdc136eea6062a8250593b45 Dec 08 21:19:23 crc kubenswrapper[4791]: I1208 21:19:23.645795 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-rdpmz"] Dec 08 21:19:23 crc kubenswrapper[4791]: I1208 21:19:23.645829 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-w9zwt"] Dec 08 21:19:23 crc kubenswrapper[4791]: I1208 21:19:23.647048 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-cbzwp" event={"ID":"e273b49d-255d-434d-935a-38ba1a53c69a","Type":"ContainerStarted","Data":"56e8f20763d098cb061cb1679b5844aa62cee25067fcaace3b765a0b775da714"} Dec 08 21:19:23 crc kubenswrapper[4791]: I1208 21:19:23.654724 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-6fncw"] Dec 08 21:19:23 crc kubenswrapper[4791]: I1208 21:19:23.670164 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-7c86k" podStartSLOduration=20.670146121 podStartE2EDuration="20.670146121s" podCreationTimestamp="2025-12-08 21:19:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:23.668160255 +0000 UTC m=+40.366918600" watchObservedRunningTime="2025-12-08 21:19:23.670146121 +0000 UTC m=+40.368904466" Dec 08 21:19:23 crc kubenswrapper[4791]: I1208 21:19:23.693787 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-xpmb4" event={"ID":"7f42a609-f815-4f83-b09b-cf94f54e6581","Type":"ContainerStarted","Data":"0dd913687ba2df2bbd938910e6f6e90af7d122caa2ef77be75b272bffb1f372d"} Dec 08 21:19:23 crc kubenswrapper[4791]: I1208 21:19:23.716323 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-hs66g" event={"ID":"4a64d896-f396-4347-9e7a-091e9741b884","Type":"ContainerStarted","Data":"54a9cd5770c13c87f46e8a9f09070cb23a29e73c1f76439d6b1f12bfebfa7868"} Dec 08 21:19:23 crc kubenswrapper[4791]: I1208 21:19:23.725413 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:23 crc kubenswrapper[4791]: E1208 21:19:23.727432 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:24.227408396 +0000 UTC m=+40.926166931 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:23 crc kubenswrapper[4791]: I1208 21:19:23.744344 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ktmqq" event={"ID":"7e41f56a-953a-4454-863e-566cf339ab08","Type":"ContainerStarted","Data":"bb881105398fffe923f7a7e5c7b176602bfe5da5957b3140f8a67cfd0d447541"} Dec 08 21:19:23 crc kubenswrapper[4791]: I1208 21:19:23.785914 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-p7dcz" event={"ID":"3857a937-d62e-4f66-b53c-7e466a6d5bff","Type":"ContainerStarted","Data":"6c37654fff1094b38b99ffb5dfa523f7a1ec1db76dcc4d18eb3c4ad4a296a334"} Dec 08 21:19:23 crc kubenswrapper[4791]: I1208 21:19:23.824998 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-8hq8w"] Dec 08 21:19:23 crc kubenswrapper[4791]: I1208 21:19:23.829572 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:23 crc kubenswrapper[4791]: E1208 21:19:23.831266 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:24.331251135 +0000 UTC m=+41.030009480 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:23 crc kubenswrapper[4791]: I1208 21:19:23.843127 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gj9w7" event={"ID":"3804c897-c904-44f5-b8a3-04ead3e93ac4","Type":"ContainerStarted","Data":"4aa5e8cbee1bd8ff8d28703e925a6db93e06618b22a7b8062780b9c0ec8296e3"} Dec 08 21:19:23 crc kubenswrapper[4791]: I1208 21:19:23.850527 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-68nlk"] Dec 08 21:19:23 crc kubenswrapper[4791]: I1208 21:19:23.859258 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4f4sg"] Dec 08 21:19:23 crc kubenswrapper[4791]: W1208 21:19:23.892387 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod91289879_25be_460a_9639_cddfd77cd942.slice/crio-5a7185abca988a4968625fbfbceaea050215a58252e8fccf0c077f7d7c44d1c7 WatchSource:0}: Error finding container 5a7185abca988a4968625fbfbceaea050215a58252e8fccf0c077f7d7c44d1c7: Status 404 returned error can't find the container with id 5a7185abca988a4968625fbfbceaea050215a58252e8fccf0c077f7d7c44d1c7 Dec 08 21:19:23 crc kubenswrapper[4791]: I1208 21:19:23.893018 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-t5m7x" event={"ID":"fb7a4850-1fbe-4d61-8594-7527ef7b28b9","Type":"ContainerStarted","Data":"cca0438c3392a5b024db1d4576f377334f417c59b0e25586c472dfbf470d9434"} Dec 08 21:19:23 crc kubenswrapper[4791]: I1208 21:19:23.928946 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-pm6zw"] Dec 08 21:19:23 crc kubenswrapper[4791]: I1208 21:19:23.987989 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-vck6k" podStartSLOduration=21.987973046 podStartE2EDuration="21.987973046s" podCreationTimestamp="2025-12-08 21:19:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:23.874400858 +0000 UTC m=+40.573159203" watchObservedRunningTime="2025-12-08 21:19:23.987973046 +0000 UTC m=+40.686731391" Dec 08 21:19:23 crc kubenswrapper[4791]: I1208 21:19:23.990488 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:23 crc kubenswrapper[4791]: E1208 21:19:23.991821 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-08 21:19:24.491803776 +0000 UTC m=+41.190562121 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.010151 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-d6h2c"] Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.020343 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.031115 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-q8wd6"] Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.047919 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-wl6d7"] Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.050514 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gnc4f"] Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.071866 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zp8m4"] Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.092941 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:24 crc kubenswrapper[4791]: E1208 21:19:24.102921 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:24.602896165 +0000 UTC m=+41.301654500 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.128886 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/cni-sysctl-allowlist-ds-2kt8c" podStartSLOduration=6.128862385 podStartE2EDuration="6.128862385s" podCreationTimestamp="2025-12-08 21:19:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:24.072057221 +0000 UTC m=+40.770815566" watchObservedRunningTime="2025-12-08 21:19:24.128862385 +0000 UTC m=+40.827620730" Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.153376 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gt8l7"] Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.162777 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-5tc82"] Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.170066 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-cbzwp" podStartSLOduration=21.170041872 podStartE2EDuration="21.170041872s" podCreationTimestamp="2025-12-08 21:19:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:24.135173943 +0000 UTC m=+40.833932288" watchObservedRunningTime="2025-12-08 21:19:24.170041872 +0000 UTC m=+40.868800217" Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.174803 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-sh5nr"] Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.176290 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-p7dcz" podStartSLOduration=21.176278668 podStartE2EDuration="21.176278668s" podCreationTimestamp="2025-12-08 21:19:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:24.160412366 +0000 UTC m=+40.859170731" watchObservedRunningTime="2025-12-08 21:19:24.176278668 +0000 UTC m=+40.875037013" Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.195510 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:24 crc kubenswrapper[4791]: E1208 21:19:24.198309 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-08 21:19:24.698280095 +0000 UTC m=+41.397038440 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.201291 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7dk77"] Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.242350 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gj9w7" Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.242962 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gj9w7" Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.258496 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-rnxcp"] Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.280000 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" podStartSLOduration=21.279980574 podStartE2EDuration="21.279980574s" podCreationTimestamp="2025-12-08 21:19:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:24.272808505 +0000 UTC m=+40.971566850" watchObservedRunningTime="2025-12-08 21:19:24.279980574 +0000 UTC m=+40.978738909" Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.281632 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-2nxp5"] Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.287177 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-7f84n"] Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.299207 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:24 crc kubenswrapper[4791]: E1208 21:19:24.300032 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:24.799568034 +0000 UTC m=+41.498326379 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.322386 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-jxmvh"] Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.322446 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420475-jq6vt"] Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.322458 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7rbn4"] Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.324765 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rvvfm"] Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.383038 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sl8dd" podStartSLOduration=21.383004393 podStartE2EDuration="21.383004393s" podCreationTimestamp="2025-12-08 21:19:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:24.372486726 +0000 UTC m=+41.071245071" watchObservedRunningTime="2025-12-08 21:19:24.383004393 +0000 UTC m=+41.081762738" Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.385093 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gj9w7" Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.389526 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-hs66g" Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.392575 4791 patch_prober.go:28] interesting pod/router-default-5444994796-hs66g container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 08 21:19:24 crc kubenswrapper[4791]: [-]has-synced failed: reason withheld Dec 08 21:19:24 crc kubenswrapper[4791]: [+]process-running ok Dec 08 21:19:24 crc kubenswrapper[4791]: healthz check failed Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.392844 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hs66g" podUID="4a64d896-f396-4347-9e7a-091e9741b884" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.400388 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:24 crc kubenswrapper[4791]: E1208 21:19:24.400906 4791 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:24.900889603 +0000 UTC m=+41.599647948 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.428579 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-cbzwp" Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.428984 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-cbzwp" Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.453677 4791 patch_prober.go:28] interesting pod/apiserver-76f77b778f-cbzwp container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Dec 08 21:19:24 crc kubenswrapper[4791]: [+]log ok Dec 08 21:19:24 crc kubenswrapper[4791]: [+]etcd ok Dec 08 21:19:24 crc kubenswrapper[4791]: [+]poststarthook/start-apiserver-admission-initializer ok Dec 08 21:19:24 crc kubenswrapper[4791]: [+]poststarthook/generic-apiserver-start-informers ok Dec 08 21:19:24 crc kubenswrapper[4791]: [+]poststarthook/max-in-flight-filter ok Dec 08 21:19:24 crc kubenswrapper[4791]: [+]poststarthook/storage-object-count-tracker-hook ok Dec 08 21:19:24 crc kubenswrapper[4791]: [+]poststarthook/image.openshift.io-apiserver-caches ok Dec 08 21:19:24 crc kubenswrapper[4791]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Dec 08 21:19:24 crc kubenswrapper[4791]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Dec 08 21:19:24 crc kubenswrapper[4791]: [+]poststarthook/project.openshift.io-projectcache ok Dec 08 21:19:24 crc kubenswrapper[4791]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Dec 08 21:19:24 crc kubenswrapper[4791]: [+]poststarthook/openshift.io-startinformers ok Dec 08 21:19:24 crc kubenswrapper[4791]: [+]poststarthook/openshift.io-restmapperupdater ok Dec 08 21:19:24 crc kubenswrapper[4791]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Dec 08 21:19:24 crc kubenswrapper[4791]: livez check failed Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.453758 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-cbzwp" podUID="e273b49d-255d-434d-935a-38ba1a53c69a" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 08 21:19:24 crc kubenswrapper[4791]: W1208 21:19:24.465112 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6b194866_8f4f_4a5c_bf04_117a87fd1836.slice/crio-7631d8d383ac1fb9c1511cad63f4d86cd85181da646c53cf5241c63d5eaa5ee1 WatchSource:0}: Error finding container 
7631d8d383ac1fb9c1511cad63f4d86cd85181da646c53cf5241c63d5eaa5ee1: Status 404 returned error can't find the container with id 7631d8d383ac1fb9c1511cad63f4d86cd85181da646c53cf5241c63d5eaa5ee1 Dec 08 21:19:24 crc kubenswrapper[4791]: W1208 21:19:24.475839 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod12b03a53_7257_43c2_98b5_4fba9fa582cb.slice/crio-66da77cdc955990045a64955165dd7d543755ea81a575d6ad2dd66c1c56affda WatchSource:0}: Error finding container 66da77cdc955990045a64955165dd7d543755ea81a575d6ad2dd66c1c56affda: Status 404 returned error can't find the container with id 66da77cdc955990045a64955165dd7d543755ea81a575d6ad2dd66c1c56affda Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.485364 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gj9w7" podStartSLOduration=21.485343217 podStartE2EDuration="21.485343217s" podCreationTimestamp="2025-12-08 21:19:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:24.455343842 +0000 UTC m=+41.154102197" watchObservedRunningTime="2025-12-08 21:19:24.485343217 +0000 UTC m=+41.184101562" Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.488291 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ktmqq" podStartSLOduration=21.488281836 podStartE2EDuration="21.488281836s" podCreationTimestamp="2025-12-08 21:19:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:24.483667297 +0000 UTC m=+41.182425642" watchObservedRunningTime="2025-12-08 21:19:24.488281836 +0000 UTC m=+41.187040181" Dec 08 21:19:24 crc kubenswrapper[4791]: W1208 21:19:24.496392 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod527bb8ce_24f8_4bcf_a100_457e11dac79d.slice/crio-773e88a83d7a93cbad5a1708268eb559e68704544c012d3248a01d5b1435b9e2 WatchSource:0}: Error finding container 773e88a83d7a93cbad5a1708268eb559e68704544c012d3248a01d5b1435b9e2: Status 404 returned error can't find the container with id 773e88a83d7a93cbad5a1708268eb559e68704544c012d3248a01d5b1435b9e2 Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.501356 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:24 crc kubenswrapper[4791]: E1208 21:19:24.502125 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:25.002111841 +0000 UTC m=+41.700870186 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.543992 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-hs66g" podStartSLOduration=21.543960513000002 podStartE2EDuration="21.543960513s" podCreationTimestamp="2025-12-08 21:19:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:24.536020507 +0000 UTC m=+41.234778852" watchObservedRunningTime="2025-12-08 21:19:24.543960513 +0000 UTC m=+41.242718848" Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.600437 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-t5m7x" podStartSLOduration=6.6004125689999995 podStartE2EDuration="6.600412569s" podCreationTimestamp="2025-12-08 21:19:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:24.595456373 +0000 UTC m=+41.294214708" watchObservedRunningTime="2025-12-08 21:19:24.600412569 +0000 UTC m=+41.299170914" Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.602734 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:24 crc kubenswrapper[4791]: E1208 21:19:24.603011 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:25.10298917 +0000 UTC m=+41.801747505 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.607406 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:24 crc kubenswrapper[4791]: E1208 21:19:24.609149 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:25.109114474 +0000 UTC m=+41.807872829 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.708513 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:24 crc kubenswrapper[4791]: E1208 21:19:24.709012 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:25.208992039 +0000 UTC m=+41.907750384 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.817957 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:24 crc kubenswrapper[4791]: E1208 21:19:24.818401 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:25.318386739 +0000 UTC m=+42.017145094 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.920640 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:24 crc kubenswrapper[4791]: E1208 21:19:24.921580 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:25.421561252 +0000 UTC m=+42.120319597 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.932371 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-w9zwt" event={"ID":"409596b6-1fa0-416d-b5a3-a06c2e36c15b","Type":"ContainerStarted","Data":"69f994b6dfedf1324c092e64d4ce5841538a24467cc84da9b5b608f55afdc42c"} Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.932420 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-w9zwt" event={"ID":"409596b6-1fa0-416d-b5a3-a06c2e36c15b","Type":"ContainerStarted","Data":"5aad25904232fdaaa877cc4f3b26c511e4dc7dfdcdc136eea6062a8250593b45"} Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.934275 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-w9zwt" Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.935090 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4f4sg" event={"ID":"cd3251b3-7787-4b6f-bcad-dc0738414547","Type":"ContainerStarted","Data":"3fceeb30da95c09c4b6d08d0ff7ab2ee33bad015e79d297b4d688bb7232f338e"} Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.939940 4791 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-w9zwt container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.16:8080/healthz\": dial tcp 10.217.0.16:8080: connect: connection refused" start-of-body= Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.940023 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-w9zwt" podUID="409596b6-1fa0-416d-b5a3-a06c2e36c15b" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.16:8080/healthz\": dial tcp 10.217.0.16:8080: connect: connection refused" Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.940235 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-7f84n" event={"ID":"565d3927-29fc-4e86-8c1b-552a14386bc0","Type":"ContainerStarted","Data":"43e48afbf904276406a98590b321eb00ed7ca91f492d8b5257225a016c1dbb36"} Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.954259 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zp8m4" event={"ID":"549eb9c0-fe38-43ee-a589-daa378a20d48","Type":"ContainerStarted","Data":"cb0818b4cee6b99f35f3aaf6e56101d673cf540e47852be094e92193e4f82112"} Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.957941 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-xpmb4" event={"ID":"7f42a609-f815-4f83-b09b-cf94f54e6581","Type":"ContainerStarted","Data":"59617a857b3784632bb7678d5c9342dd65b14a78b81ed3c1c34e11598241a94a"} Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.959940 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-q8wd6" event={"ID":"e1c4e40d-0ac4-4a7f-9803-4be4a15fdbf7","Type":"ContainerStarted","Data":"2826106ab769f991f5a634d595c623e64dfac05c919a5ff3bd408c8e8a961d5e"} Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.960996 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-rnxcp" event={"ID":"8256b973-cb50-4848-aabf-109537321b94","Type":"ContainerStarted","Data":"c3fd50f297ffd41e14d8391a90548d4e31efbd499c4f886539cea5e612bd7329"} Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.965764 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-w9zwt" podStartSLOduration=21.965746459000002 podStartE2EDuration="21.965746459s" podCreationTimestamp="2025-12-08 21:19:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:24.964266685 +0000 UTC m=+41.663025030" watchObservedRunningTime="2025-12-08 21:19:24.965746459 +0000 UTC m=+41.664504804" Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.970655 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-d6h2c" event={"ID":"8eec1570-5f40-4b0d-9a0f-04e381eac889","Type":"ContainerStarted","Data":"6593e73699dd39b8b88f2f77eeebeeddfd0a4c3bc64f62a752c303bb0970a5ae"} Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.972613 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29420475-jq6vt" event={"ID":"527bb8ce-24f8-4bcf-a100-457e11dac79d","Type":"ContainerStarted","Data":"773e88a83d7a93cbad5a1708268eb559e68704544c012d3248a01d5b1435b9e2"} Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.982158 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-xpmb4" podStartSLOduration=6.982137154 podStartE2EDuration="6.982137154s" podCreationTimestamp="2025-12-08 21:19:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:24.980516916 +0000 UTC m=+41.679275261" watchObservedRunningTime="2025-12-08 21:19:24.982137154 +0000 UTC m=+41.680895499" Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.995996 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-68nlk" event={"ID":"839bdb0e-d6c5-4464-8e23-ee63845cf40f","Type":"ContainerStarted","Data":"eb05181c793e0a0f8026151050234e14f33c7f50dd1a50e110d0072148717d5f"} Dec 08 21:19:24 crc kubenswrapper[4791]: I1208 21:19:24.996445 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-68nlk" event={"ID":"839bdb0e-d6c5-4464-8e23-ee63845cf40f","Type":"ContainerStarted","Data":"5eb01d65d7ee24217b247f0be0c1aa27fefb069c0de3f5cc5f529e64389145ae"} Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.015311 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-68nlk" podStartSLOduration=22.015289983 podStartE2EDuration="22.015289983s" podCreationTimestamp="2025-12-08 21:19:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:25.012757333 +0000 UTC m=+41.711515698" watchObservedRunningTime="2025-12-08 21:19:25.015289983 +0000 UTC m=+41.714048328" Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.023630 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:25 crc kubenswrapper[4791]: E1208 21:19:25.024208 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:25.524195722 +0000 UTC m=+42.222954067 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.066033 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-6fncw" event={"ID":"665b13a1-0c0e-4249-b7ec-f862eaeb3aea","Type":"ContainerStarted","Data":"01546c232d0f4b1f6b6d02cff008412c31a26b963f83e380844053ccc7f6481c"} Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.066073 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-6fncw" event={"ID":"665b13a1-0c0e-4249-b7ec-f862eaeb3aea","Type":"ContainerStarted","Data":"0436889897be327e59b4dde61083b400404b8b1ffff82417a07c159111c6c397"} Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.090184 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-6fncw" podStartSLOduration=22.090169802 podStartE2EDuration="22.090169802s" podCreationTimestamp="2025-12-08 21:19:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:25.088848721 +0000 UTC m=+41.787607066" watchObservedRunningTime="2025-12-08 21:19:25.090169802 +0000 UTC m=+41.788928147" Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.097811 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-rdpmz" event={"ID":"ec69aeaf-ae67-4d6f-8342-4d3e84db8f63","Type":"ContainerStarted","Data":"2ed3503744e1269da810194a2243ae787cf05fc04b119331eaa1037d4838d741"} Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.097862 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-rdpmz" event={"ID":"ec69aeaf-ae67-4d6f-8342-4d3e84db8f63","Type":"ContainerStarted","Data":"f7e160245672566d83f7e4c03a08f775f42b679f65ed00640f38d6adf5b68523"} Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.114263 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-config-operator/openshift-config-operator-7777fb866f-d259k" Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.134478 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:25 crc kubenswrapper[4791]: E1208 21:19:25.134585 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:25.634562214 +0000 UTC m=+42.333320559 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.134726 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:25 crc kubenswrapper[4791]: E1208 21:19:25.135599 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:25.635585108 +0000 UTC m=+42.334343453 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.186026 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pm6zw" event={"ID":"001d07d1-9cd0-4c97-bb65-29457e205813","Type":"ContainerStarted","Data":"d2f14b061a234c12d915249e9847273a50c0aaf02d89286c31dac9059b0ab767"} Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.186410 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pm6zw" event={"ID":"001d07d1-9cd0-4c97-bb65-29457e205813","Type":"ContainerStarted","Data":"acf643dba194a47cc9c77e785a923fad24490754449967ee2dc2f128f1f745ec"} Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.211201 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-2nxp5" event={"ID":"295d8719-2569-4b02-a1a7-3dd2a2b119a8","Type":"ContainerStarted","Data":"539e21a7fee808bd21de8597107f6e9b48f15cb8d08ec632a2bf18edcc5e6205"} Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.238745 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:25 crc kubenswrapper[4791]: E1208 21:19:25.239432 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:25.739403016 +0000 UTC m=+42.438161361 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.249504 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-8hq8w" event={"ID":"91289879-25be-460a-9639-cddfd77cd942","Type":"ContainerStarted","Data":"584f8c85f535002eab7905f1c9fa50f4bfd564febe799e525f4ecd8fc436b5e1"} Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.249553 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-8hq8w" event={"ID":"91289879-25be-460a-9639-cddfd77cd942","Type":"ContainerStarted","Data":"5a7185abca988a4968625fbfbceaea050215a58252e8fccf0c077f7d7c44d1c7"} Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.269877 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7dk77" event={"ID":"7d3897ba-b8f0-4eea-948f-74af9801350b","Type":"ContainerStarted","Data":"fd1a3f79e3cce597d4331dd143f015838c4d5405efee7c062c93386ed8d51048"} Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.327202 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gt8l7" event={"ID":"97356cb4-53ea-4094-a7ac-28cde046b53c","Type":"ContainerStarted","Data":"3234de0bbbcff7c0ab9a045f130222b04d6cce8b5c9936efee04c9dd8d31937d"} Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.340907 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-5tc82" event={"ID":"0de53f0b-4ad4-43c0-bc9b-cfbbd5cc0c2c","Type":"ContainerStarted","Data":"0df3fecb817c7fd7c1d939abd15ae26a4e4e3ab63e62f8a76f5c8dbd5316bbda"} Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.341163 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:25 crc kubenswrapper[4791]: E1208 21:19:25.341437 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:25.841426792 +0000 UTC m=+42.540185137 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.366949 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7rbn4" event={"ID":"6b194866-8f4f-4a5c-bf04-117a87fd1836","Type":"ContainerStarted","Data":"7631d8d383ac1fb9c1511cad63f4d86cd85181da646c53cf5241c63d5eaa5ee1"} Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.372591 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-8hq8w" podStartSLOduration=22.372574334 podStartE2EDuration="22.372574334s" podCreationTimestamp="2025-12-08 21:19:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:25.290070446 +0000 UTC m=+41.988828791" watchObservedRunningTime="2025-12-08 21:19:25.372574334 +0000 UTC m=+42.071332679" Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.399454 4791 patch_prober.go:28] interesting pod/router-default-5444994796-hs66g container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 08 21:19:25 crc kubenswrapper[4791]: [-]has-synced failed: reason withheld Dec 08 21:19:25 crc kubenswrapper[4791]: [+]process-running ok Dec 08 21:19:25 crc kubenswrapper[4791]: healthz check failed Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.399527 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hs66g" podUID="4a64d896-f396-4347-9e7a-091e9741b884" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.444198 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.445788 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rdmr7" event={"ID":"e9bbe11b-6bc7-4bb3-89cc-178d572202e1","Type":"ContainerStarted","Data":"f008819b859d84f5a0d813a53c75da18238347abdb92b444ebd0f3169847eb18"} Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.445822 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rdmr7" event={"ID":"e9bbe11b-6bc7-4bb3-89cc-178d572202e1","Type":"ContainerStarted","Data":"104fe21664b6e5bd3234c9b250d639579e724eaeeb77d587160f3c93a1fbe9dc"} Dec 08 21:19:25 crc kubenswrapper[4791]: E1208 21:19:25.446495 4791 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:25.946463449 +0000 UTC m=+42.645221794 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.464819 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-wl6d7" event={"ID":"74d9b7de-2712-4320-b41a-1e5c91bd36e7","Type":"ContainerStarted","Data":"6a375a93951183ef6ff3f4cfb3151138850920d9b3a72ada6e469712cdcabf8a"} Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.464871 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-wl6d7" event={"ID":"74d9b7de-2712-4320-b41a-1e5c91bd36e7","Type":"ContainerStarted","Data":"188ae52b356097f3f2b1cb52deceee42ba47cd98002cdb511104006b89670a64"} Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.466009 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-wl6d7" Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.473909 4791 patch_prober.go:28] interesting pod/downloads-7954f5f757-wl6d7 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused" start-of-body= Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.473962 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-wl6d7" podUID="74d9b7de-2712-4320-b41a-1e5c91bd36e7" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused" Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.475238 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gt8l7" podStartSLOduration=23.475223915 podStartE2EDuration="23.475223915s" podCreationTimestamp="2025-12-08 21:19:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:25.372432911 +0000 UTC m=+42.071191256" watchObservedRunningTime="2025-12-08 21:19:25.475223915 +0000 UTC m=+42.173982260" Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.476282 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rdmr7" podStartSLOduration=22.47627675 podStartE2EDuration="22.47627675s" podCreationTimestamp="2025-12-08 21:19:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:25.476263929 +0000 UTC m=+42.175022274" watchObservedRunningTime="2025-12-08 21:19:25.47627675 +0000 UTC m=+42.175035095" Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.504167 4791 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gnc4f" event={"ID":"1789d3a9-3a3a-421c-ad72-cb08f45afa70","Type":"ContainerStarted","Data":"920bad8e5be5b96ea36af89f5553fba7d1c8f50800691fb1eefd78c8a0e118f7"} Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.512858 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-jxmvh" event={"ID":"12b03a53-7257-43c2-98b5-4fba9fa582cb","Type":"ContainerStarted","Data":"66da77cdc955990045a64955165dd7d543755ea81a575d6ad2dd66c1c56affda"} Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.515512 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-sh5nr" event={"ID":"ee523989-5472-461e-8673-c5e80f5216e1","Type":"ContainerStarted","Data":"cc395adccd48a737cabb8940836b44c3b9d31de65debf70395df5f60474fd5cc"} Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.528933 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rvvfm" event={"ID":"e1b46aaa-47dd-462d-835a-e688e19f4fca","Type":"ContainerStarted","Data":"8805caecd75accc77ea06998dc8087329c8b96193dacbcaf02fd1bf7350ed699"} Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.529141 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rvvfm" Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.531294 4791 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-rvvfm container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.28:8443/healthz\": dial tcp 10.217.0.28:8443: connect: connection refused" start-of-body= Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.531355 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rvvfm" podUID="e1b46aaa-47dd-462d-835a-e688e19f4fca" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.28:8443/healthz\": dial tcp 10.217.0.28:8443: connect: connection refused" Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.536769 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ktmqq" event={"ID":"7e41f56a-953a-4454-863e-566cf339ab08","Type":"ContainerStarted","Data":"a58614bcab148db9a04032c4541ca30517384e6656495d8311d06c5875603950"} Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.546214 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:25 crc kubenswrapper[4791]: E1208 21:19:25.548161 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:26.048143637 +0000 UTC m=+42.746901982 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.559223 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-wl6d7" podStartSLOduration=23.559204147 podStartE2EDuration="23.559204147s" podCreationTimestamp="2025-12-08 21:19:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:25.503995961 +0000 UTC m=+42.202754306" watchObservedRunningTime="2025-12-08 21:19:25.559204147 +0000 UTC m=+42.257962492" Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.560435 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gj9w7" Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.593099 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rvvfm" podStartSLOduration=22.593059832 podStartE2EDuration="22.593059832s" podCreationTimestamp="2025-12-08 21:19:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:25.559924184 +0000 UTC m=+42.258682519" watchObservedRunningTime="2025-12-08 21:19:25.593059832 +0000 UTC m=+42.291818177" Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.647471 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:25 crc kubenswrapper[4791]: E1208 21:19:25.649786 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:26.149763594 +0000 UTC m=+42.848521929 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.696658 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-multus/cni-sysctl-allowlist-ds-2kt8c" Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.750131 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:25 crc kubenswrapper[4791]: E1208 21:19:25.750513 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:26.25050052 +0000 UTC m=+42.949258865 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.850982 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:25 crc kubenswrapper[4791]: E1208 21:19:25.851377 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:26.351362239 +0000 UTC m=+43.050120584 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:25 crc kubenswrapper[4791]: I1208 21:19:25.952389 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:25 crc kubenswrapper[4791]: E1208 21:19:25.952692 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:26.452679938 +0000 UTC m=+43.151438273 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.058077 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:26 crc kubenswrapper[4791]: E1208 21:19:26.058800 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:26.55878501 +0000 UTC m=+43.257543355 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.159729 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:26 crc kubenswrapper[4791]: E1208 21:19:26.160286 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:26.660272894 +0000 UTC m=+43.359031239 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.261283 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:26 crc kubenswrapper[4791]: E1208 21:19:26.261920 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:26.761905701 +0000 UTC m=+43.460664046 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.362968 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:26 crc kubenswrapper[4791]: E1208 21:19:26.363327 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:26.863315892 +0000 UTC m=+43.562074237 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.393795 4791 patch_prober.go:28] interesting pod/router-default-5444994796-hs66g container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 08 21:19:26 crc kubenswrapper[4791]: [-]has-synced failed: reason withheld Dec 08 21:19:26 crc kubenswrapper[4791]: [+]process-running ok Dec 08 21:19:26 crc kubenswrapper[4791]: healthz check failed Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.393853 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hs66g" podUID="4a64d896-f396-4347-9e7a-091e9741b884" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.442767 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-multus/cni-sysctl-allowlist-ds-2kt8c"] Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.464531 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:26 crc kubenswrapper[4791]: E1208 21:19:26.464931 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:26.964916368 +0000 UTC m=+43.663674713 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.566103 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:26 crc kubenswrapper[4791]: E1208 21:19:26.566769 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:27.06675389 +0000 UTC m=+43.765512235 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.567534 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-7f84n" event={"ID":"565d3927-29fc-4e86-8c1b-552a14386bc0","Type":"ContainerStarted","Data":"2bdc9326215eeceabbf38dc22bcd48e4d6013bef1698c8fea00e1bc523842245"} Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.585985 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-q8wd6" event={"ID":"e1c4e40d-0ac4-4a7f-9803-4be4a15fdbf7","Type":"ContainerStarted","Data":"cc89f98080da1f93ffc8fbc9ccfba52b3448f8addd1700ac836301be4cb3f302"} Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.600134 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-d6h2c" event={"ID":"8eec1570-5f40-4b0d-9a0f-04e381eac889","Type":"ContainerStarted","Data":"5a7331c2432b3f7fef6e1ce10cbe37cb8659b22221ceb13005ffe006e0d109d7"} Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.602324 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gt8l7" event={"ID":"97356cb4-53ea-4094-a7ac-28cde046b53c","Type":"ContainerStarted","Data":"408f45c9fdd67f41766365d2313b45a77c6498416dea7ae8b81a7d40247a6e90"} Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.605789 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-5tc82" event={"ID":"0de53f0b-4ad4-43c0-bc9b-cfbbd5cc0c2c","Type":"ContainerStarted","Data":"dab13c050621fd6a1ea69710effce00007eaa78b8847a2c589ac6db9b8c813c6"} Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.611098 4791 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4f4sg" event={"ID":"cd3251b3-7787-4b6f-bcad-dc0738414547","Type":"ContainerStarted","Data":"fc228e8d1e677cfe24a05e98d343d58359a6e604eef4d90e6dc8923a3884fd66"} Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.614540 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-jxmvh" event={"ID":"12b03a53-7257-43c2-98b5-4fba9fa582cb","Type":"ContainerStarted","Data":"8f8366ec2cda2baec03f771d9cce884a402c52d1c49c507f409f93f9f68145fb"} Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.614576 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-jxmvh" event={"ID":"12b03a53-7257-43c2-98b5-4fba9fa582cb","Type":"ContainerStarted","Data":"2f7505c90efe2fa090e77fd7569e6ab37a8fff13abfb34ae16aea85bc59073ae"} Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.617950 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7rbn4" event={"ID":"6b194866-8f4f-4a5c-bf04-117a87fd1836","Type":"ContainerStarted","Data":"146c6900e7b5fae091143f06ab05419bf89d2ca9e375f0b66658eeeb35a5ecb4"} Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.621137 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-rdpmz" event={"ID":"ec69aeaf-ae67-4d6f-8342-4d3e84db8f63","Type":"ContainerStarted","Data":"c9f168b51f4f602602fad03110067371a8f7bd83b4c28239fe9ae253a845e97e"} Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.622049 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-q8wd6" podStartSLOduration=23.622038059 podStartE2EDuration="23.622038059s" podCreationTimestamp="2025-12-08 21:19:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:26.619784555 +0000 UTC m=+43.318542910" watchObservedRunningTime="2025-12-08 21:19:26.622038059 +0000 UTC m=+43.320796404" Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.639419 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pm6zw" event={"ID":"001d07d1-9cd0-4c97-bb65-29457e205813","Type":"ContainerStarted","Data":"a0c0c88a0d4dee1345008d410d427238234197a81dce0d779004fb8d4dfefb55"} Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.646246 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-sh5nr" event={"ID":"ee523989-5472-461e-8673-c5e80f5216e1","Type":"ContainerStarted","Data":"5c2fbd220031ae462c2445beed12738eb5f9e72e1f09b659c4ad540d3ffb0521"} Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.646296 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-sh5nr" event={"ID":"ee523989-5472-461e-8673-c5e80f5216e1","Type":"ContainerStarted","Data":"bb882d3700683b64adbe59f34304a846dc0d85025dcf1448708e62ca6bcf60a3"} Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.648176 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zp8m4" 
event={"ID":"549eb9c0-fe38-43ee-a589-daa378a20d48","Type":"ContainerStarted","Data":"1a314d964ff93932f5499ea6aae7ec48e91750d69f3e414ff22e6c4366df7507"} Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.648985 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zp8m4" Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.655484 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7dk77" event={"ID":"7d3897ba-b8f0-4eea-948f-74af9801350b","Type":"ContainerStarted","Data":"bb44dc7fe0e7f26f4c4c1a1069844530da9dd473ebe1a7075061583f5ebf4d04"} Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.655532 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7dk77" event={"ID":"7d3897ba-b8f0-4eea-948f-74af9801350b","Type":"ContainerStarted","Data":"ac1db5bf3cac838712bb24ab9b0cf9ec73b21a36aee870dd468418e09c1ce0c9"} Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.656179 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7dk77" Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.657352 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29420475-jq6vt" event={"ID":"527bb8ce-24f8-4bcf-a100-457e11dac79d","Type":"ContainerStarted","Data":"49fe55620982a62abc6abcd0a4bbd76055ac3cf31d2627423e8539f2029b7b50"} Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.659739 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rdmr7" event={"ID":"e9bbe11b-6bc7-4bb3-89cc-178d572202e1","Type":"ContainerStarted","Data":"a64ee2b98de77fc23fa01f3c527f889fc0f3a63bc89bb7fd2aeb4bb8c37355bd"} Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.660516 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zp8m4" Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.667205 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-2nxp5" event={"ID":"295d8719-2569-4b02-a1a7-3dd2a2b119a8","Type":"ContainerStarted","Data":"c260c0b44a5925f595196bd64420ff489cfcfa7867c2fb600e2e517be6d6a75a"} Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.667271 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-2nxp5" event={"ID":"295d8719-2569-4b02-a1a7-3dd2a2b119a8","Type":"ContainerStarted","Data":"c2c68837d208a5bf08449ebad75954501f34c1d705075253724cd9fce9de840c"} Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.668871 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:26 crc kubenswrapper[4791]: E1208 21:19:26.670307 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-08 21:19:27.170292472 +0000 UTC m=+43.869050817 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.687560 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4f4sg" podStartSLOduration=23.687543957 podStartE2EDuration="23.687543957s" podCreationTimestamp="2025-12-08 21:19:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:26.654199984 +0000 UTC m=+43.352958339" watchObservedRunningTime="2025-12-08 21:19:26.687543957 +0000 UTC m=+43.386302302" Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.709016 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rvvfm" event={"ID":"e1b46aaa-47dd-462d-835a-e688e19f4fca","Type":"ContainerStarted","Data":"3533d3adf19f7cdd0e7863039ec60e8765962f62783b819f687e1e7dc7a8c943"} Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.719681 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rvvfm" Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.729975 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-5tc82" podStartSLOduration=23.729957833 podStartE2EDuration="23.729957833s" podCreationTimestamp="2025-12-08 21:19:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:26.687990467 +0000 UTC m=+43.386748812" watchObservedRunningTime="2025-12-08 21:19:26.729957833 +0000 UTC m=+43.428716178" Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.745937 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-rnxcp" event={"ID":"8256b973-cb50-4848-aabf-109537321b94","Type":"ContainerStarted","Data":"dff4a86ef079706af593541af1a799823e3ff51436eabeb107ed7a2cc5ad0442"} Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.759022 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gnc4f" event={"ID":"1789d3a9-3a3a-421c-ad72-cb08f45afa70","Type":"ContainerStarted","Data":"4634eb210a5d0cffe2201d6922f3e1bbab745865ede8af0e76839b3fa9cd1411"} Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.759071 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gnc4f" Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.760437 4791 patch_prober.go:28] interesting pod/downloads-7954f5f757-wl6d7 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused" start-of-body= Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 
21:19:26.760473 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-wl6d7" podUID="74d9b7de-2712-4320-b41a-1e5c91bd36e7" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused" Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.764284 4791 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-w9zwt container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.16:8080/healthz\": dial tcp 10.217.0.16:8080: connect: connection refused" start-of-body= Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.768763 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-w9zwt" podUID="409596b6-1fa0-416d-b5a3-a06c2e36c15b" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.16:8080/healthz\": dial tcp 10.217.0.16:8080: connect: connection refused" Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.772844 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.775565 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-jxmvh" podStartSLOduration=23.775550754 podStartE2EDuration="23.775550754s" podCreationTimestamp="2025-12-08 21:19:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:26.730639039 +0000 UTC m=+43.429397384" watchObservedRunningTime="2025-12-08 21:19:26.775550754 +0000 UTC m=+43.474309099" Dec 08 21:19:26 crc kubenswrapper[4791]: E1208 21:19:26.776269 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:27.27625164 +0000 UTC m=+43.975009985 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.779880 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7rbn4" podStartSLOduration=23.779870125 podStartE2EDuration="23.779870125s" podCreationTimestamp="2025-12-08 21:19:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:26.774194562 +0000 UTC m=+43.472952927" watchObservedRunningTime="2025-12-08 21:19:26.779870125 +0000 UTC m=+43.478628480" Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.800042 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-d6h2c" podStartSLOduration=23.800026758 podStartE2EDuration="23.800026758s" podCreationTimestamp="2025-12-08 21:19:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:26.798969673 +0000 UTC m=+43.497728028" watchObservedRunningTime="2025-12-08 21:19:26.800026758 +0000 UTC m=+43.498785103" Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.842851 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-2nxp5" podStartSLOduration=24.842836303 podStartE2EDuration="24.842836303s" podCreationTimestamp="2025-12-08 21:19:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:26.841477171 +0000 UTC m=+43.540235516" watchObservedRunningTime="2025-12-08 21:19:26.842836303 +0000 UTC m=+43.541594648" Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.874136 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:26 crc kubenswrapper[4791]: E1208 21:19:26.875879 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:27.375860749 +0000 UTC m=+44.074619094 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.878880 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:26 crc kubenswrapper[4791]: E1208 21:19:26.881414 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:27.381394709 +0000 UTC m=+44.080153154 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.888204 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7dk77" podStartSLOduration=23.888188318 podStartE2EDuration="23.888188318s" podCreationTimestamp="2025-12-08 21:19:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:26.887388099 +0000 UTC m=+43.586146444" watchObservedRunningTime="2025-12-08 21:19:26.888188318 +0000 UTC m=+43.586946663" Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.960279 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-rdpmz" podStartSLOduration=24.960259811 podStartE2EDuration="24.960259811s" podCreationTimestamp="2025-12-08 21:19:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:26.945427973 +0000 UTC m=+43.644186318" watchObservedRunningTime="2025-12-08 21:19:26.960259811 +0000 UTC m=+43.659018156" Dec 08 21:19:26 crc kubenswrapper[4791]: I1208 21:19:26.984433 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:26 crc kubenswrapper[4791]: E1208 21:19:26.984848 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:27.484832908 +0000 UTC m=+44.183591253 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:27 crc kubenswrapper[4791]: I1208 21:19:27.054009 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-sh5nr" podStartSLOduration=24.053994072 podStartE2EDuration="24.053994072s" podCreationTimestamp="2025-12-08 21:19:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:27.051156786 +0000 UTC m=+43.749915131" watchObservedRunningTime="2025-12-08 21:19:27.053994072 +0000 UTC m=+43.752752417" Dec 08 21:19:27 crc kubenswrapper[4791]: I1208 21:19:27.078586 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pm6zw" podStartSLOduration=24.078569369 podStartE2EDuration="24.078569369s" podCreationTimestamp="2025-12-08 21:19:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:27.07772629 +0000 UTC m=+43.776484635" watchObservedRunningTime="2025-12-08 21:19:27.078569369 +0000 UTC m=+43.777327724" Dec 08 21:19:27 crc kubenswrapper[4791]: I1208 21:19:27.086375 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:27 crc kubenswrapper[4791]: E1208 21:19:27.086742 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:27.586728811 +0000 UTC m=+44.285487146 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:27 crc kubenswrapper[4791]: I1208 21:19:27.107859 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zp8m4" podStartSLOduration=24.107845227 podStartE2EDuration="24.107845227s" podCreationTimestamp="2025-12-08 21:19:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:27.10710253 +0000 UTC m=+43.805860875" watchObservedRunningTime="2025-12-08 21:19:27.107845227 +0000 UTC m=+43.806603572" Dec 08 21:19:27 crc kubenswrapper[4791]: I1208 21:19:27.136410 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29420475-jq6vt" podStartSLOduration=25.136386537 podStartE2EDuration="25.136386537s" podCreationTimestamp="2025-12-08 21:19:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:27.134153275 +0000 UTC m=+43.832911640" watchObservedRunningTime="2025-12-08 21:19:27.136386537 +0000 UTC m=+43.835144882" Dec 08 21:19:27 crc kubenswrapper[4791]: I1208 21:19:27.174624 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gnc4f" podStartSLOduration=24.174607265 podStartE2EDuration="24.174607265s" podCreationTimestamp="2025-12-08 21:19:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:27.173178271 +0000 UTC m=+43.871936616" watchObservedRunningTime="2025-12-08 21:19:27.174607265 +0000 UTC m=+43.873365610" Dec 08 21:19:27 crc kubenswrapper[4791]: I1208 21:19:27.190212 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:27 crc kubenswrapper[4791]: E1208 21:19:27.190548 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:27.690529709 +0000 UTC m=+44.389288054 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:27 crc kubenswrapper[4791]: I1208 21:19:27.291915 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:27 crc kubenswrapper[4791]: E1208 21:19:27.292262 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:27.792246748 +0000 UTC m=+44.491005093 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:27 crc kubenswrapper[4791]: I1208 21:19:27.393478 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:27 crc kubenswrapper[4791]: E1208 21:19:27.393680 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:27.893647839 +0000 UTC m=+44.592406184 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:27 crc kubenswrapper[4791]: I1208 21:19:27.393981 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:27 crc kubenswrapper[4791]: E1208 21:19:27.394291 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:27.894276314 +0000 UTC m=+44.593034659 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:27 crc kubenswrapper[4791]: I1208 21:19:27.403627 4791 patch_prober.go:28] interesting pod/router-default-5444994796-hs66g container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 08 21:19:27 crc kubenswrapper[4791]: [-]has-synced failed: reason withheld Dec 08 21:19:27 crc kubenswrapper[4791]: [+]process-running ok Dec 08 21:19:27 crc kubenswrapper[4791]: healthz check failed Dec 08 21:19:27 crc kubenswrapper[4791]: I1208 21:19:27.403693 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hs66g" podUID="4a64d896-f396-4347-9e7a-091e9741b884" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 08 21:19:27 crc kubenswrapper[4791]: I1208 21:19:27.495396 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:27 crc kubenswrapper[4791]: E1208 21:19:27.495553 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:27.995521202 +0000 UTC m=+44.694279547 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:27 crc kubenswrapper[4791]: I1208 21:19:27.495939 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:27 crc kubenswrapper[4791]: E1208 21:19:27.496254 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:27.996241779 +0000 UTC m=+44.695000124 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:27 crc kubenswrapper[4791]: I1208 21:19:27.597027 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:27 crc kubenswrapper[4791]: E1208 21:19:27.597221 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:28.09718853 +0000 UTC m=+44.795946875 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:27 crc kubenswrapper[4791]: I1208 21:19:27.597344 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:27 crc kubenswrapper[4791]: E1208 21:19:27.597699 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:28.097689791 +0000 UTC m=+44.796448136 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:27 crc kubenswrapper[4791]: I1208 21:19:27.698935 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:27 crc kubenswrapper[4791]: E1208 21:19:27.699259 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:28.199242146 +0000 UTC m=+44.898000491 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:27 crc kubenswrapper[4791]: I1208 21:19:27.758868 4791 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-gnc4f container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.31:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 08 21:19:27 crc kubenswrapper[4791]: I1208 21:19:27.758949 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gnc4f" podUID="1789d3a9-3a3a-421c-ad72-cb08f45afa70" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.31:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 08 21:19:27 crc kubenswrapper[4791]: I1208 21:19:27.763913 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-rnxcp" event={"ID":"8256b973-cb50-4848-aabf-109537321b94","Type":"ContainerStarted","Data":"707f1fa85b83a7cdb7e6288c8c21cd952f9c60c7969f13d13b63c659ce1cdc1d"} Dec 08 21:19:27 crc kubenswrapper[4791]: I1208 21:19:27.767448 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-7f84n" event={"ID":"565d3927-29fc-4e86-8c1b-552a14386bc0","Type":"ContainerStarted","Data":"e17ce143772eafb64c67ab9db4fa03bcc0374895d7e4e904576bd62675e732c5"} Dec 08 21:19:27 crc kubenswrapper[4791]: I1208 21:19:27.772650 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-multus/cni-sysctl-allowlist-ds-2kt8c" podUID="848a44d0-a0f4-48c2-9e4a-f0a4d3329815" containerName="kube-multus-additional-cni-plugins" containerID="cri-o://809ab81c5459ff31fc8a256a984e864b4ba0c14a542420796322cf4769ff1551" gracePeriod=30 Dec 08 21:19:27 crc kubenswrapper[4791]: I1208 21:19:27.773684 4791 patch_prober.go:28] interesting pod/downloads-7954f5f757-wl6d7 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused" start-of-body= Dec 08 21:19:27 crc kubenswrapper[4791]: I1208 21:19:27.773786 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-wl6d7" podUID="74d9b7de-2712-4320-b41a-1e5c91bd36e7" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused" Dec 08 21:19:27 crc kubenswrapper[4791]: I1208 21:19:27.774023 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-7f84n" Dec 08 21:19:27 crc kubenswrapper[4791]: I1208 21:19:27.801559 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: 
\"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:27 crc kubenswrapper[4791]: E1208 21:19:27.802136 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:28.302119742 +0000 UTC m=+45.000878087 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:27 crc kubenswrapper[4791]: I1208 21:19:27.802591 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-7f84n" podStartSLOduration=9.802567713 podStartE2EDuration="9.802567713s" podCreationTimestamp="2025-12-08 21:19:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:27.794066323 +0000 UTC m=+44.492824678" watchObservedRunningTime="2025-12-08 21:19:27.802567713 +0000 UTC m=+44.501326058" Dec 08 21:19:27 crc kubenswrapper[4791]: I1208 21:19:27.828634 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-npdcj"] Dec 08 21:19:27 crc kubenswrapper[4791]: I1208 21:19:27.829871 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-npdcj" Dec 08 21:19:27 crc kubenswrapper[4791]: I1208 21:19:27.835576 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 08 21:19:27 crc kubenswrapper[4791]: I1208 21:19:27.847509 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-npdcj"] Dec 08 21:19:27 crc kubenswrapper[4791]: I1208 21:19:27.902743 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:27 crc kubenswrapper[4791]: E1208 21:19:27.902957 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:28.40292444 +0000 UTC m=+45.101682795 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:27 crc kubenswrapper[4791]: I1208 21:19:27.905042 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:27 crc kubenswrapper[4791]: E1208 21:19:27.905212 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:28.405201883 +0000 UTC m=+45.103960218 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:27 crc kubenswrapper[4791]: I1208 21:19:27.948252 4791 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.013111 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.013677 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0decf941-a6e4-485f-afd4-7972d332952a-catalog-content\") pod \"community-operators-npdcj\" (UID: \"0decf941-a6e4-485f-afd4-7972d332952a\") " pod="openshift-marketplace/community-operators-npdcj" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.013756 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qcngh\" (UniqueName: \"kubernetes.io/projected/0decf941-a6e4-485f-afd4-7972d332952a-kube-api-access-qcngh\") pod \"community-operators-npdcj\" (UID: \"0decf941-a6e4-485f-afd4-7972d332952a\") " pod="openshift-marketplace/community-operators-npdcj" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.014034 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0decf941-a6e4-485f-afd4-7972d332952a-utilities\") pod \"community-operators-npdcj\" (UID: \"0decf941-a6e4-485f-afd4-7972d332952a\") " 
pod="openshift-marketplace/community-operators-npdcj" Dec 08 21:19:28 crc kubenswrapper[4791]: E1208 21:19:28.014215 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:28.514193993 +0000 UTC m=+45.212952348 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.050374 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-mbbd4"] Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.051666 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mbbd4" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.058075 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.068578 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mbbd4"] Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.075542 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gnc4f" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.116327 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0decf941-a6e4-485f-afd4-7972d332952a-utilities\") pod \"community-operators-npdcj\" (UID: \"0decf941-a6e4-485f-afd4-7972d332952a\") " pod="openshift-marketplace/community-operators-npdcj" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.116396 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.116430 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0decf941-a6e4-485f-afd4-7972d332952a-catalog-content\") pod \"community-operators-npdcj\" (UID: \"0decf941-a6e4-485f-afd4-7972d332952a\") " pod="openshift-marketplace/community-operators-npdcj" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.116470 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qcngh\" (UniqueName: \"kubernetes.io/projected/0decf941-a6e4-485f-afd4-7972d332952a-kube-api-access-qcngh\") pod \"community-operators-npdcj\" (UID: \"0decf941-a6e4-485f-afd4-7972d332952a\") " pod="openshift-marketplace/community-operators-npdcj" Dec 08 21:19:28 crc kubenswrapper[4791]: E1208 21:19:28.116837 4791 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:28.616825203 +0000 UTC m=+45.315583548 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.117242 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0decf941-a6e4-485f-afd4-7972d332952a-catalog-content\") pod \"community-operators-npdcj\" (UID: \"0decf941-a6e4-485f-afd4-7972d332952a\") " pod="openshift-marketplace/community-operators-npdcj" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.117282 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0decf941-a6e4-485f-afd4-7972d332952a-utilities\") pod \"community-operators-npdcj\" (UID: \"0decf941-a6e4-485f-afd4-7972d332952a\") " pod="openshift-marketplace/community-operators-npdcj" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.184385 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qcngh\" (UniqueName: \"kubernetes.io/projected/0decf941-a6e4-485f-afd4-7972d332952a-kube-api-access-qcngh\") pod \"community-operators-npdcj\" (UID: \"0decf941-a6e4-485f-afd4-7972d332952a\") " pod="openshift-marketplace/community-operators-npdcj" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.217104 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:28 crc kubenswrapper[4791]: E1208 21:19:28.217229 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:28.717211911 +0000 UTC m=+45.415970256 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.217785 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/175a64fd-0187-4d28-87f1-76194cac1bf2-utilities\") pod \"certified-operators-mbbd4\" (UID: \"175a64fd-0187-4d28-87f1-76194cac1bf2\") " pod="openshift-marketplace/certified-operators-mbbd4" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.217840 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dls9d\" (UniqueName: \"kubernetes.io/projected/175a64fd-0187-4d28-87f1-76194cac1bf2-kube-api-access-dls9d\") pod \"certified-operators-mbbd4\" (UID: \"175a64fd-0187-4d28-87f1-76194cac1bf2\") " pod="openshift-marketplace/certified-operators-mbbd4" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.217893 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.218009 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/175a64fd-0187-4d28-87f1-76194cac1bf2-catalog-content\") pod \"certified-operators-mbbd4\" (UID: \"175a64fd-0187-4d28-87f1-76194cac1bf2\") " pod="openshift-marketplace/certified-operators-mbbd4" Dec 08 21:19:28 crc kubenswrapper[4791]: E1208 21:19:28.218364 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:28.718329957 +0000 UTC m=+45.417088512 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.221614 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-xfw54"] Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.222760 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xfw54" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.236444 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xfw54"] Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.318614 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:28 crc kubenswrapper[4791]: E1208 21:19:28.318631 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:28.818609983 +0000 UTC m=+45.517368328 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.318962 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/85644602-2976-45a0-a2ae-f324c48d3ed5-utilities\") pod \"community-operators-xfw54\" (UID: \"85644602-2976-45a0-a2ae-f324c48d3ed5\") " pod="openshift-marketplace/community-operators-xfw54" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.318990 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bmjg5\" (UniqueName: \"kubernetes.io/projected/85644602-2976-45a0-a2ae-f324c48d3ed5-kube-api-access-bmjg5\") pod \"community-operators-xfw54\" (UID: \"85644602-2976-45a0-a2ae-f324c48d3ed5\") " pod="openshift-marketplace/community-operators-xfw54" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.319086 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/175a64fd-0187-4d28-87f1-76194cac1bf2-catalog-content\") pod \"certified-operators-mbbd4\" (UID: \"175a64fd-0187-4d28-87f1-76194cac1bf2\") " pod="openshift-marketplace/certified-operators-mbbd4" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.319121 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/175a64fd-0187-4d28-87f1-76194cac1bf2-utilities\") pod \"certified-operators-mbbd4\" (UID: \"175a64fd-0187-4d28-87f1-76194cac1bf2\") " pod="openshift-marketplace/certified-operators-mbbd4" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.319169 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dls9d\" (UniqueName: \"kubernetes.io/projected/175a64fd-0187-4d28-87f1-76194cac1bf2-kube-api-access-dls9d\") pod \"certified-operators-mbbd4\" (UID: \"175a64fd-0187-4d28-87f1-76194cac1bf2\") " pod="openshift-marketplace/certified-operators-mbbd4" Dec 08 21:19:28 crc 
kubenswrapper[4791]: I1208 21:19:28.319219 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.319263 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/85644602-2976-45a0-a2ae-f324c48d3ed5-catalog-content\") pod \"community-operators-xfw54\" (UID: \"85644602-2976-45a0-a2ae-f324c48d3ed5\") " pod="openshift-marketplace/community-operators-xfw54" Dec 08 21:19:28 crc kubenswrapper[4791]: E1208 21:19:28.319611 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:28.819596886 +0000 UTC m=+45.518355241 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.319626 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/175a64fd-0187-4d28-87f1-76194cac1bf2-utilities\") pod \"certified-operators-mbbd4\" (UID: \"175a64fd-0187-4d28-87f1-76194cac1bf2\") " pod="openshift-marketplace/certified-operators-mbbd4" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.334086 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dls9d\" (UniqueName: \"kubernetes.io/projected/175a64fd-0187-4d28-87f1-76194cac1bf2-kube-api-access-dls9d\") pod \"certified-operators-mbbd4\" (UID: \"175a64fd-0187-4d28-87f1-76194cac1bf2\") " pod="openshift-marketplace/certified-operators-mbbd4" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.337493 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/175a64fd-0187-4d28-87f1-76194cac1bf2-catalog-content\") pod \"certified-operators-mbbd4\" (UID: \"175a64fd-0187-4d28-87f1-76194cac1bf2\") " pod="openshift-marketplace/certified-operators-mbbd4" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.389976 4791 patch_prober.go:28] interesting pod/router-default-5444994796-hs66g container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 08 21:19:28 crc kubenswrapper[4791]: [-]has-synced failed: reason withheld Dec 08 21:19:28 crc kubenswrapper[4791]: [+]process-running ok Dec 08 21:19:28 crc kubenswrapper[4791]: healthz check failed Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.390035 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hs66g" 
podUID="4a64d896-f396-4347-9e7a-091e9741b884" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.420685 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:28 crc kubenswrapper[4791]: E1208 21:19:28.420843 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:28.920819073 +0000 UTC m=+45.619577428 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.420935 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.420977 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/85644602-2976-45a0-a2ae-f324c48d3ed5-catalog-content\") pod \"community-operators-xfw54\" (UID: \"85644602-2976-45a0-a2ae-f324c48d3ed5\") " pod="openshift-marketplace/community-operators-xfw54" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.421007 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/85644602-2976-45a0-a2ae-f324c48d3ed5-utilities\") pod \"community-operators-xfw54\" (UID: \"85644602-2976-45a0-a2ae-f324c48d3ed5\") " pod="openshift-marketplace/community-operators-xfw54" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.421031 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bmjg5\" (UniqueName: \"kubernetes.io/projected/85644602-2976-45a0-a2ae-f324c48d3ed5-kube-api-access-bmjg5\") pod \"community-operators-xfw54\" (UID: \"85644602-2976-45a0-a2ae-f324c48d3ed5\") " pod="openshift-marketplace/community-operators-xfw54" Dec 08 21:19:28 crc kubenswrapper[4791]: E1208 21:19:28.421680 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:28.921667963 +0000 UTC m=+45.620426328 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.421939 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-dgxzp"] Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.422307 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/85644602-2976-45a0-a2ae-f324c48d3ed5-utilities\") pod \"community-operators-xfw54\" (UID: \"85644602-2976-45a0-a2ae-f324c48d3ed5\") " pod="openshift-marketplace/community-operators-xfw54" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.422335 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/85644602-2976-45a0-a2ae-f324c48d3ed5-catalog-content\") pod \"community-operators-xfw54\" (UID: \"85644602-2976-45a0-a2ae-f324c48d3ed5\") " pod="openshift-marketplace/community-operators-xfw54" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.423108 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dgxzp" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.430558 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mbbd4" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.434913 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dgxzp"] Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.443265 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-npdcj" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.452679 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bmjg5\" (UniqueName: \"kubernetes.io/projected/85644602-2976-45a0-a2ae-f324c48d3ed5-kube-api-access-bmjg5\") pod \"community-operators-xfw54\" (UID: \"85644602-2976-45a0-a2ae-f324c48d3ed5\") " pod="openshift-marketplace/community-operators-xfw54" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.524454 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.524814 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/102d168e-2cef-47ee-8911-1724e0d1982d-catalog-content\") pod \"certified-operators-dgxzp\" (UID: \"102d168e-2cef-47ee-8911-1724e0d1982d\") " pod="openshift-marketplace/certified-operators-dgxzp" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.524854 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/102d168e-2cef-47ee-8911-1724e0d1982d-utilities\") pod \"certified-operators-dgxzp\" (UID: \"102d168e-2cef-47ee-8911-1724e0d1982d\") " pod="openshift-marketplace/certified-operators-dgxzp" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.524926 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n9t5z\" (UniqueName: \"kubernetes.io/projected/102d168e-2cef-47ee-8911-1724e0d1982d-kube-api-access-n9t5z\") pod \"certified-operators-dgxzp\" (UID: \"102d168e-2cef-47ee-8911-1724e0d1982d\") " pod="openshift-marketplace/certified-operators-dgxzp" Dec 08 21:19:28 crc kubenswrapper[4791]: E1208 21:19:28.525041 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:29.02501945 +0000 UTC m=+45.723777795 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.543658 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xfw54" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.630703 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/102d168e-2cef-47ee-8911-1724e0d1982d-catalog-content\") pod \"certified-operators-dgxzp\" (UID: \"102d168e-2cef-47ee-8911-1724e0d1982d\") " pod="openshift-marketplace/certified-operators-dgxzp" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.630782 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/102d168e-2cef-47ee-8911-1724e0d1982d-utilities\") pod \"certified-operators-dgxzp\" (UID: \"102d168e-2cef-47ee-8911-1724e0d1982d\") " pod="openshift-marketplace/certified-operators-dgxzp" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.630815 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.630880 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n9t5z\" (UniqueName: \"kubernetes.io/projected/102d168e-2cef-47ee-8911-1724e0d1982d-kube-api-access-n9t5z\") pod \"certified-operators-dgxzp\" (UID: \"102d168e-2cef-47ee-8911-1724e0d1982d\") " pod="openshift-marketplace/certified-operators-dgxzp" Dec 08 21:19:28 crc kubenswrapper[4791]: E1208 21:19:28.631350 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:19:29.131332367 +0000 UTC m=+45.830090712 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lk2tp" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.631673 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/102d168e-2cef-47ee-8911-1724e0d1982d-catalog-content\") pod \"certified-operators-dgxzp\" (UID: \"102d168e-2cef-47ee-8911-1724e0d1982d\") " pod="openshift-marketplace/certified-operators-dgxzp" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.631944 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/102d168e-2cef-47ee-8911-1724e0d1982d-utilities\") pod \"certified-operators-dgxzp\" (UID: \"102d168e-2cef-47ee-8911-1724e0d1982d\") " pod="openshift-marketplace/certified-operators-dgxzp" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.657598 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n9t5z\" (UniqueName: \"kubernetes.io/projected/102d168e-2cef-47ee-8911-1724e0d1982d-kube-api-access-n9t5z\") pod \"certified-operators-dgxzp\" (UID: \"102d168e-2cef-47ee-8911-1724e0d1982d\") " pod="openshift-marketplace/certified-operators-dgxzp" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.731850 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:28 crc kubenswrapper[4791]: E1208 21:19:28.732213 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:29.232198956 +0000 UTC m=+45.930957301 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.741363 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-dgxzp" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.761404 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-npdcj"] Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.763996 4791 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-08T21:19:27.948287615Z","Handler":null,"Name":""} Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.775140 4791 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.775183 4791 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.785579 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-rnxcp" event={"ID":"8256b973-cb50-4848-aabf-109537321b94","Type":"ContainerStarted","Data":"71328c6ec992d083b7cee8e6780afb7cf683dc51e4a52fd1586ee4fa274df155"} Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.785624 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-rnxcp" event={"ID":"8256b973-cb50-4848-aabf-109537321b94","Type":"ContainerStarted","Data":"9708cd973989680e044964f5c0de1e0fc2c052b7b7a02cdf291f5e8c6918da8e"} Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.787629 4791 generic.go:334] "Generic (PLEG): container finished" podID="527bb8ce-24f8-4bcf-a100-457e11dac79d" containerID="49fe55620982a62abc6abcd0a4bbd76055ac3cf31d2627423e8539f2029b7b50" exitCode=0 Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.788767 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29420475-jq6vt" event={"ID":"527bb8ce-24f8-4bcf-a100-457e11dac79d","Type":"ContainerDied","Data":"49fe55620982a62abc6abcd0a4bbd76055ac3cf31d2627423e8539f2029b7b50"} Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.833083 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.838739 4791 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.838790 4791 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.839902 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-rnxcp" podStartSLOduration=10.839879475 podStartE2EDuration="10.839879475s" podCreationTimestamp="2025-12-08 21:19:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:28.815162724 +0000 UTC m=+45.513921079" watchObservedRunningTime="2025-12-08 21:19:28.839879475 +0000 UTC m=+45.538637820" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.872382 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lk2tp\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.914400 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mbbd4"] Dec 08 21:19:28 crc kubenswrapper[4791]: W1208 21:19:28.930281 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod175a64fd_0187_4d28_87f1_76194cac1bf2.slice/crio-90c5f19fe2c420ed1a1ffbe28bdf56c9cf6ebd4197ae67d5aa706248f924c1c5 WatchSource:0}: Error finding container 90c5f19fe2c420ed1a1ffbe28bdf56c9cf6ebd4197ae67d5aa706248f924c1c5: Status 404 returned error can't find the container with id 90c5f19fe2c420ed1a1ffbe28bdf56c9cf6ebd4197ae67d5aa706248f924c1c5 Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.934277 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:28 crc kubenswrapper[4791]: I1208 21:19:28.981431 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 08 21:19:29 crc kubenswrapper[4791]: I1208 21:19:29.022909 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dgxzp"] Dec 08 21:19:29 crc kubenswrapper[4791]: I1208 21:19:29.036640 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xfw54"] Dec 08 21:19:29 crc kubenswrapper[4791]: I1208 21:19:29.131191 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:29 crc kubenswrapper[4791]: I1208 21:19:29.369435 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-lk2tp"] Dec 08 21:19:29 crc kubenswrapper[4791]: I1208 21:19:29.389255 4791 patch_prober.go:28] interesting pod/router-default-5444994796-hs66g container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 08 21:19:29 crc kubenswrapper[4791]: [-]has-synced failed: reason withheld Dec 08 21:19:29 crc kubenswrapper[4791]: [+]process-running ok Dec 08 21:19:29 crc kubenswrapper[4791]: healthz check failed Dec 08 21:19:29 crc kubenswrapper[4791]: I1208 21:19:29.389317 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hs66g" podUID="4a64d896-f396-4347-9e7a-091e9741b884" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 08 21:19:29 crc kubenswrapper[4791]: I1208 21:19:29.431977 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-cbzwp" Dec 08 21:19:29 crc kubenswrapper[4791]: I1208 21:19:29.436027 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-cbzwp" Dec 08 21:19:29 crc kubenswrapper[4791]: I1208 21:19:29.605799 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Dec 08 21:19:29 crc kubenswrapper[4791]: I1208 21:19:29.615724 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-vck6k" Dec 08 21:19:29 crc kubenswrapper[4791]: I1208 21:19:29.616533 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-vck6k" Dec 08 21:19:29 crc kubenswrapper[4791]: I1208 21:19:29.618035 4791 patch_prober.go:28] interesting pod/console-f9d7485db-vck6k container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.13:8443/health\": dial tcp 10.217.0.13:8443: connect: connection refused" start-of-body= Dec 08 21:19:29 crc kubenswrapper[4791]: I1208 21:19:29.618076 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-vck6k" podUID="959a8eb5-7d1a-4e10-bfc4-c23a1223d38d" containerName="console" probeResult="failure" output="Get \"https://10.217.0.13:8443/health\": dial tcp 10.217.0.13:8443: connect: connection refused" Dec 08 21:19:29 crc kubenswrapper[4791]: I1208 21:19:29.794507 4791 generic.go:334] "Generic (PLEG): container finished" podID="85644602-2976-45a0-a2ae-f324c48d3ed5" containerID="724add11e0d6cb434e1283c59e4f037adcf8d45c7ffc5fbd3e7b1b635198b396" exitCode=0 Dec 
08 21:19:29 crc kubenswrapper[4791]: I1208 21:19:29.794563 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xfw54" event={"ID":"85644602-2976-45a0-a2ae-f324c48d3ed5","Type":"ContainerDied","Data":"724add11e0d6cb434e1283c59e4f037adcf8d45c7ffc5fbd3e7b1b635198b396"} Dec 08 21:19:29 crc kubenswrapper[4791]: I1208 21:19:29.794627 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xfw54" event={"ID":"85644602-2976-45a0-a2ae-f324c48d3ed5","Type":"ContainerStarted","Data":"8d1d1c61ec27b252a13030550c7aa221a02039fdbfd7b7db6ab19ac52d364d4b"} Dec 08 21:19:29 crc kubenswrapper[4791]: I1208 21:19:29.796597 4791 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 08 21:19:29 crc kubenswrapper[4791]: I1208 21:19:29.796813 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" event={"ID":"097c1e37-dbfb-4e31-9c4d-561c6bed9933","Type":"ContainerStarted","Data":"794aa42a619242e946a94d06aeaa582e9b8bb74e8b45ea5a8753b040f252cee4"} Dec 08 21:19:29 crc kubenswrapper[4791]: I1208 21:19:29.796886 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:29 crc kubenswrapper[4791]: I1208 21:19:29.796898 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" event={"ID":"097c1e37-dbfb-4e31-9c4d-561c6bed9933","Type":"ContainerStarted","Data":"747841df8457cb7eced4df60ab2d49c1535f36fda66f0b2ec841619c761e1415"} Dec 08 21:19:29 crc kubenswrapper[4791]: I1208 21:19:29.798472 4791 generic.go:334] "Generic (PLEG): container finished" podID="102d168e-2cef-47ee-8911-1724e0d1982d" containerID="541db9b23bbe04f31fc4bb9d6f32d1b9124bd8f426907f81cbfce29884cbb6e5" exitCode=0 Dec 08 21:19:29 crc kubenswrapper[4791]: I1208 21:19:29.798534 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dgxzp" event={"ID":"102d168e-2cef-47ee-8911-1724e0d1982d","Type":"ContainerDied","Data":"541db9b23bbe04f31fc4bb9d6f32d1b9124bd8f426907f81cbfce29884cbb6e5"} Dec 08 21:19:29 crc kubenswrapper[4791]: I1208 21:19:29.798552 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dgxzp" event={"ID":"102d168e-2cef-47ee-8911-1724e0d1982d","Type":"ContainerStarted","Data":"e9372d8229ccc95491b6aa18c5efa7f5217e6cee73ae00ac4505fdf5117c61a6"} Dec 08 21:19:29 crc kubenswrapper[4791]: I1208 21:19:29.801124 4791 generic.go:334] "Generic (PLEG): container finished" podID="175a64fd-0187-4d28-87f1-76194cac1bf2" containerID="9356ac4bc6716c458253c8ab544897a7b7e1405f240e7744b2b65540420edbbc" exitCode=0 Dec 08 21:19:29 crc kubenswrapper[4791]: I1208 21:19:29.801200 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mbbd4" event={"ID":"175a64fd-0187-4d28-87f1-76194cac1bf2","Type":"ContainerDied","Data":"9356ac4bc6716c458253c8ab544897a7b7e1405f240e7744b2b65540420edbbc"} Dec 08 21:19:29 crc kubenswrapper[4791]: I1208 21:19:29.801230 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mbbd4" event={"ID":"175a64fd-0187-4d28-87f1-76194cac1bf2","Type":"ContainerStarted","Data":"90c5f19fe2c420ed1a1ffbe28bdf56c9cf6ebd4197ae67d5aa706248f924c1c5"} Dec 08 21:19:29 crc kubenswrapper[4791]: I1208 21:19:29.806971 4791 generic.go:334] 
"Generic (PLEG): container finished" podID="0decf941-a6e4-485f-afd4-7972d332952a" containerID="fa6d6768491dba1ca6277230431287d3f7f5aa712e2f4983b82d1dab48a2facd" exitCode=0 Dec 08 21:19:29 crc kubenswrapper[4791]: I1208 21:19:29.807011 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-npdcj" event={"ID":"0decf941-a6e4-485f-afd4-7972d332952a","Type":"ContainerDied","Data":"fa6d6768491dba1ca6277230431287d3f7f5aa712e2f4983b82d1dab48a2facd"} Dec 08 21:19:29 crc kubenswrapper[4791]: I1208 21:19:29.807061 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-npdcj" event={"ID":"0decf941-a6e4-485f-afd4-7972d332952a","Type":"ContainerStarted","Data":"74baff70fede653d5c0b8a44fba98e179b1e2f8262c5ffc82b6504d8a88b4086"} Dec 08 21:19:29 crc kubenswrapper[4791]: I1208 21:19:29.825087 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-ck6br"] Dec 08 21:19:29 crc kubenswrapper[4791]: I1208 21:19:29.829353 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ck6br" Dec 08 21:19:29 crc kubenswrapper[4791]: I1208 21:19:29.837918 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ck6br"] Dec 08 21:19:29 crc kubenswrapper[4791]: I1208 21:19:29.838093 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 08 21:19:29 crc kubenswrapper[4791]: I1208 21:19:29.908272 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" podStartSLOduration=26.908254756 podStartE2EDuration="26.908254756s" podCreationTimestamp="2025-12-08 21:19:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:29.905219445 +0000 UTC m=+46.603977800" watchObservedRunningTime="2025-12-08 21:19:29.908254756 +0000 UTC m=+46.607013091" Dec 08 21:19:29 crc kubenswrapper[4791]: I1208 21:19:29.951586 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3a99522-0cda-4894-8d9a-bc8aaa7763e3-catalog-content\") pod \"redhat-marketplace-ck6br\" (UID: \"e3a99522-0cda-4894-8d9a-bc8aaa7763e3\") " pod="openshift-marketplace/redhat-marketplace-ck6br" Dec 08 21:19:29 crc kubenswrapper[4791]: I1208 21:19:29.951663 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b8v54\" (UniqueName: \"kubernetes.io/projected/e3a99522-0cda-4894-8d9a-bc8aaa7763e3-kube-api-access-b8v54\") pod \"redhat-marketplace-ck6br\" (UID: \"e3a99522-0cda-4894-8d9a-bc8aaa7763e3\") " pod="openshift-marketplace/redhat-marketplace-ck6br" Dec 08 21:19:29 crc kubenswrapper[4791]: I1208 21:19:29.951985 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3a99522-0cda-4894-8d9a-bc8aaa7763e3-utilities\") pod \"redhat-marketplace-ck6br\" (UID: \"e3a99522-0cda-4894-8d9a-bc8aaa7763e3\") " pod="openshift-marketplace/redhat-marketplace-ck6br" Dec 08 21:19:30 crc kubenswrapper[4791]: I1208 21:19:30.056411 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/e3a99522-0cda-4894-8d9a-bc8aaa7763e3-utilities\") pod \"redhat-marketplace-ck6br\" (UID: \"e3a99522-0cda-4894-8d9a-bc8aaa7763e3\") " pod="openshift-marketplace/redhat-marketplace-ck6br" Dec 08 21:19:30 crc kubenswrapper[4791]: I1208 21:19:30.056482 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3a99522-0cda-4894-8d9a-bc8aaa7763e3-catalog-content\") pod \"redhat-marketplace-ck6br\" (UID: \"e3a99522-0cda-4894-8d9a-bc8aaa7763e3\") " pod="openshift-marketplace/redhat-marketplace-ck6br" Dec 08 21:19:30 crc kubenswrapper[4791]: I1208 21:19:30.056506 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b8v54\" (UniqueName: \"kubernetes.io/projected/e3a99522-0cda-4894-8d9a-bc8aaa7763e3-kube-api-access-b8v54\") pod \"redhat-marketplace-ck6br\" (UID: \"e3a99522-0cda-4894-8d9a-bc8aaa7763e3\") " pod="openshift-marketplace/redhat-marketplace-ck6br" Dec 08 21:19:30 crc kubenswrapper[4791]: I1208 21:19:30.057274 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3a99522-0cda-4894-8d9a-bc8aaa7763e3-utilities\") pod \"redhat-marketplace-ck6br\" (UID: \"e3a99522-0cda-4894-8d9a-bc8aaa7763e3\") " pod="openshift-marketplace/redhat-marketplace-ck6br" Dec 08 21:19:30 crc kubenswrapper[4791]: I1208 21:19:30.057516 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3a99522-0cda-4894-8d9a-bc8aaa7763e3-catalog-content\") pod \"redhat-marketplace-ck6br\" (UID: \"e3a99522-0cda-4894-8d9a-bc8aaa7763e3\") " pod="openshift-marketplace/redhat-marketplace-ck6br" Dec 08 21:19:30 crc kubenswrapper[4791]: I1208 21:19:30.114089 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b8v54\" (UniqueName: \"kubernetes.io/projected/e3a99522-0cda-4894-8d9a-bc8aaa7763e3-kube-api-access-b8v54\") pod \"redhat-marketplace-ck6br\" (UID: \"e3a99522-0cda-4894-8d9a-bc8aaa7763e3\") " pod="openshift-marketplace/redhat-marketplace-ck6br" Dec 08 21:19:30 crc kubenswrapper[4791]: I1208 21:19:30.161215 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ck6br" Dec 08 21:19:30 crc kubenswrapper[4791]: I1208 21:19:30.214140 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420475-jq6vt" Dec 08 21:19:30 crc kubenswrapper[4791]: I1208 21:19:30.232066 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-4chcv"] Dec 08 21:19:30 crc kubenswrapper[4791]: E1208 21:19:30.232274 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="527bb8ce-24f8-4bcf-a100-457e11dac79d" containerName="collect-profiles" Dec 08 21:19:30 crc kubenswrapper[4791]: I1208 21:19:30.232286 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="527bb8ce-24f8-4bcf-a100-457e11dac79d" containerName="collect-profiles" Dec 08 21:19:30 crc kubenswrapper[4791]: I1208 21:19:30.232405 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="527bb8ce-24f8-4bcf-a100-457e11dac79d" containerName="collect-profiles" Dec 08 21:19:30 crc kubenswrapper[4791]: I1208 21:19:30.233152 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4chcv" Dec 08 21:19:30 crc kubenswrapper[4791]: I1208 21:19:30.251491 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4chcv"] Dec 08 21:19:30 crc kubenswrapper[4791]: I1208 21:19:30.361007 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vfxbk\" (UniqueName: \"kubernetes.io/projected/527bb8ce-24f8-4bcf-a100-457e11dac79d-kube-api-access-vfxbk\") pod \"527bb8ce-24f8-4bcf-a100-457e11dac79d\" (UID: \"527bb8ce-24f8-4bcf-a100-457e11dac79d\") " Dec 08 21:19:30 crc kubenswrapper[4791]: I1208 21:19:30.361487 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/527bb8ce-24f8-4bcf-a100-457e11dac79d-secret-volume\") pod \"527bb8ce-24f8-4bcf-a100-457e11dac79d\" (UID: \"527bb8ce-24f8-4bcf-a100-457e11dac79d\") " Dec 08 21:19:30 crc kubenswrapper[4791]: I1208 21:19:30.361565 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/527bb8ce-24f8-4bcf-a100-457e11dac79d-config-volume\") pod \"527bb8ce-24f8-4bcf-a100-457e11dac79d\" (UID: \"527bb8ce-24f8-4bcf-a100-457e11dac79d\") " Dec 08 21:19:30 crc kubenswrapper[4791]: I1208 21:19:30.361702 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kct8z\" (UniqueName: \"kubernetes.io/projected/78dfcd05-fbfb-4b65-8d75-2fa345534b21-kube-api-access-kct8z\") pod \"redhat-marketplace-4chcv\" (UID: \"78dfcd05-fbfb-4b65-8d75-2fa345534b21\") " pod="openshift-marketplace/redhat-marketplace-4chcv" Dec 08 21:19:30 crc kubenswrapper[4791]: I1208 21:19:30.361763 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78dfcd05-fbfb-4b65-8d75-2fa345534b21-utilities\") pod \"redhat-marketplace-4chcv\" (UID: \"78dfcd05-fbfb-4b65-8d75-2fa345534b21\") " pod="openshift-marketplace/redhat-marketplace-4chcv" Dec 08 21:19:30 crc kubenswrapper[4791]: I1208 21:19:30.361780 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78dfcd05-fbfb-4b65-8d75-2fa345534b21-catalog-content\") pod \"redhat-marketplace-4chcv\" (UID: \"78dfcd05-fbfb-4b65-8d75-2fa345534b21\") " pod="openshift-marketplace/redhat-marketplace-4chcv" Dec 08 21:19:30 crc kubenswrapper[4791]: I1208 21:19:30.362770 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/527bb8ce-24f8-4bcf-a100-457e11dac79d-config-volume" (OuterVolumeSpecName: "config-volume") pod "527bb8ce-24f8-4bcf-a100-457e11dac79d" (UID: "527bb8ce-24f8-4bcf-a100-457e11dac79d"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:19:30 crc kubenswrapper[4791]: I1208 21:19:30.367128 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/527bb8ce-24f8-4bcf-a100-457e11dac79d-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "527bb8ce-24f8-4bcf-a100-457e11dac79d" (UID: "527bb8ce-24f8-4bcf-a100-457e11dac79d"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:19:30 crc kubenswrapper[4791]: I1208 21:19:30.371177 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/527bb8ce-24f8-4bcf-a100-457e11dac79d-kube-api-access-vfxbk" (OuterVolumeSpecName: "kube-api-access-vfxbk") pod "527bb8ce-24f8-4bcf-a100-457e11dac79d" (UID: "527bb8ce-24f8-4bcf-a100-457e11dac79d"). InnerVolumeSpecName "kube-api-access-vfxbk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:30 crc kubenswrapper[4791]: I1208 21:19:30.389650 4791 patch_prober.go:28] interesting pod/router-default-5444994796-hs66g container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 08 21:19:30 crc kubenswrapper[4791]: [-]has-synced failed: reason withheld Dec 08 21:19:30 crc kubenswrapper[4791]: [+]process-running ok Dec 08 21:19:30 crc kubenswrapper[4791]: healthz check failed Dec 08 21:19:30 crc kubenswrapper[4791]: I1208 21:19:30.389726 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hs66g" podUID="4a64d896-f396-4347-9e7a-091e9741b884" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 08 21:19:30 crc kubenswrapper[4791]: I1208 21:19:30.464174 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kct8z\" (UniqueName: \"kubernetes.io/projected/78dfcd05-fbfb-4b65-8d75-2fa345534b21-kube-api-access-kct8z\") pod \"redhat-marketplace-4chcv\" (UID: \"78dfcd05-fbfb-4b65-8d75-2fa345534b21\") " pod="openshift-marketplace/redhat-marketplace-4chcv" Dec 08 21:19:30 crc kubenswrapper[4791]: I1208 21:19:30.464244 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78dfcd05-fbfb-4b65-8d75-2fa345534b21-utilities\") pod \"redhat-marketplace-4chcv\" (UID: \"78dfcd05-fbfb-4b65-8d75-2fa345534b21\") " pod="openshift-marketplace/redhat-marketplace-4chcv" Dec 08 21:19:30 crc kubenswrapper[4791]: I1208 21:19:30.464267 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78dfcd05-fbfb-4b65-8d75-2fa345534b21-catalog-content\") pod \"redhat-marketplace-4chcv\" (UID: \"78dfcd05-fbfb-4b65-8d75-2fa345534b21\") " pod="openshift-marketplace/redhat-marketplace-4chcv" Dec 08 21:19:30 crc kubenswrapper[4791]: I1208 21:19:30.464410 4791 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/527bb8ce-24f8-4bcf-a100-457e11dac79d-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:30 crc kubenswrapper[4791]: I1208 21:19:30.464430 4791 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/527bb8ce-24f8-4bcf-a100-457e11dac79d-config-volume\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:30 crc kubenswrapper[4791]: I1208 21:19:30.464442 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vfxbk\" (UniqueName: \"kubernetes.io/projected/527bb8ce-24f8-4bcf-a100-457e11dac79d-kube-api-access-vfxbk\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:30 crc kubenswrapper[4791]: I1208 21:19:30.464787 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/78dfcd05-fbfb-4b65-8d75-2fa345534b21-utilities\") pod \"redhat-marketplace-4chcv\" (UID: \"78dfcd05-fbfb-4b65-8d75-2fa345534b21\") " pod="openshift-marketplace/redhat-marketplace-4chcv" Dec 08 21:19:30 crc kubenswrapper[4791]: I1208 21:19:30.464849 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78dfcd05-fbfb-4b65-8d75-2fa345534b21-catalog-content\") pod \"redhat-marketplace-4chcv\" (UID: \"78dfcd05-fbfb-4b65-8d75-2fa345534b21\") " pod="openshift-marketplace/redhat-marketplace-4chcv" Dec 08 21:19:30 crc kubenswrapper[4791]: I1208 21:19:30.466661 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ck6br"] Dec 08 21:19:30 crc kubenswrapper[4791]: W1208 21:19:30.476070 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode3a99522_0cda_4894_8d9a_bc8aaa7763e3.slice/crio-b74f0ce49c959597060eb64a19e9913d57d1a1938c4a75753c46c773ed3f719b WatchSource:0}: Error finding container b74f0ce49c959597060eb64a19e9913d57d1a1938c4a75753c46c773ed3f719b: Status 404 returned error can't find the container with id b74f0ce49c959597060eb64a19e9913d57d1a1938c4a75753c46c773ed3f719b Dec 08 21:19:30 crc kubenswrapper[4791]: I1208 21:19:30.483181 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kct8z\" (UniqueName: \"kubernetes.io/projected/78dfcd05-fbfb-4b65-8d75-2fa345534b21-kube-api-access-kct8z\") pod \"redhat-marketplace-4chcv\" (UID: \"78dfcd05-fbfb-4b65-8d75-2fa345534b21\") " pod="openshift-marketplace/redhat-marketplace-4chcv" Dec 08 21:19:30 crc kubenswrapper[4791]: I1208 21:19:30.551081 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4chcv" Dec 08 21:19:30 crc kubenswrapper[4791]: I1208 21:19:30.815100 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ck6br" event={"ID":"e3a99522-0cda-4894-8d9a-bc8aaa7763e3","Type":"ContainerStarted","Data":"b74f0ce49c959597060eb64a19e9913d57d1a1938c4a75753c46c773ed3f719b"} Dec 08 21:19:30 crc kubenswrapper[4791]: I1208 21:19:30.817814 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420475-jq6vt" Dec 08 21:19:30 crc kubenswrapper[4791]: I1208 21:19:30.818340 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29420475-jq6vt" event={"ID":"527bb8ce-24f8-4bcf-a100-457e11dac79d","Type":"ContainerDied","Data":"773e88a83d7a93cbad5a1708268eb559e68704544c012d3248a01d5b1435b9e2"} Dec 08 21:19:30 crc kubenswrapper[4791]: I1208 21:19:30.818400 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="773e88a83d7a93cbad5a1708268eb559e68704544c012d3248a01d5b1435b9e2" Dec 08 21:19:30 crc kubenswrapper[4791]: I1208 21:19:30.956096 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4chcv"] Dec 08 21:19:30 crc kubenswrapper[4791]: W1208 21:19:30.963468 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod78dfcd05_fbfb_4b65_8d75_2fa345534b21.slice/crio-676297b3c859d13f9380dd8cdb6d60844361468e1ca5f53bd50133fc95c31a0b WatchSource:0}: Error finding container 676297b3c859d13f9380dd8cdb6d60844361468e1ca5f53bd50133fc95c31a0b: Status 404 returned error can't find the container with id 676297b3c859d13f9380dd8cdb6d60844361468e1ca5f53bd50133fc95c31a0b Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.033306 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-g287b"] Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.034574 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-g287b" Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.037002 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.043377 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-g287b"] Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.174701 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vg8vg\" (UniqueName: \"kubernetes.io/projected/f4fae2f4-952d-43c7-b7a2-55c898273973-kube-api-access-vg8vg\") pod \"redhat-operators-g287b\" (UID: \"f4fae2f4-952d-43c7-b7a2-55c898273973\") " pod="openshift-marketplace/redhat-operators-g287b" Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.174834 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4fae2f4-952d-43c7-b7a2-55c898273973-utilities\") pod \"redhat-operators-g287b\" (UID: \"f4fae2f4-952d-43c7-b7a2-55c898273973\") " pod="openshift-marketplace/redhat-operators-g287b" Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.174882 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4fae2f4-952d-43c7-b7a2-55c898273973-catalog-content\") pod \"redhat-operators-g287b\" (UID: \"f4fae2f4-952d-43c7-b7a2-55c898273973\") " pod="openshift-marketplace/redhat-operators-g287b" Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.276139 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/f4fae2f4-952d-43c7-b7a2-55c898273973-catalog-content\") pod \"redhat-operators-g287b\" (UID: \"f4fae2f4-952d-43c7-b7a2-55c898273973\") " pod="openshift-marketplace/redhat-operators-g287b" Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.276218 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vg8vg\" (UniqueName: \"kubernetes.io/projected/f4fae2f4-952d-43c7-b7a2-55c898273973-kube-api-access-vg8vg\") pod \"redhat-operators-g287b\" (UID: \"f4fae2f4-952d-43c7-b7a2-55c898273973\") " pod="openshift-marketplace/redhat-operators-g287b" Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.276279 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4fae2f4-952d-43c7-b7a2-55c898273973-utilities\") pod \"redhat-operators-g287b\" (UID: \"f4fae2f4-952d-43c7-b7a2-55c898273973\") " pod="openshift-marketplace/redhat-operators-g287b" Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.277226 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4fae2f4-952d-43c7-b7a2-55c898273973-catalog-content\") pod \"redhat-operators-g287b\" (UID: \"f4fae2f4-952d-43c7-b7a2-55c898273973\") " pod="openshift-marketplace/redhat-operators-g287b" Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.277395 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4fae2f4-952d-43c7-b7a2-55c898273973-utilities\") pod \"redhat-operators-g287b\" (UID: \"f4fae2f4-952d-43c7-b7a2-55c898273973\") " pod="openshift-marketplace/redhat-operators-g287b" Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.296887 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vg8vg\" (UniqueName: \"kubernetes.io/projected/f4fae2f4-952d-43c7-b7a2-55c898273973-kube-api-access-vg8vg\") pod \"redhat-operators-g287b\" (UID: \"f4fae2f4-952d-43c7-b7a2-55c898273973\") " pod="openshift-marketplace/redhat-operators-g287b" Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.318921 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-w9zwt" Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.351111 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-g287b" Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.381492 4791 patch_prober.go:28] interesting pod/downloads-7954f5f757-wl6d7 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused" start-of-body= Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.381619 4791 patch_prober.go:28] interesting pod/downloads-7954f5f757-wl6d7 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused" start-of-body= Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.381648 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-wl6d7" podUID="74d9b7de-2712-4320-b41a-1e5c91bd36e7" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused" Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.381589 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-wl6d7" podUID="74d9b7de-2712-4320-b41a-1e5c91bd36e7" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused" Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.387169 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-hs66g" Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.391214 4791 patch_prober.go:28] interesting pod/router-default-5444994796-hs66g container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 08 21:19:31 crc kubenswrapper[4791]: [-]has-synced failed: reason withheld Dec 08 21:19:31 crc kubenswrapper[4791]: [+]process-running ok Dec 08 21:19:31 crc kubenswrapper[4791]: healthz check failed Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.391254 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hs66g" podUID="4a64d896-f396-4347-9e7a-091e9741b884" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.426028 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-qvjv5"] Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.427247 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-qvjv5" Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.437860 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qvjv5"] Dec 08 21:19:31 crc kubenswrapper[4791]: E1208 21:19:31.579795 4791 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="809ab81c5459ff31fc8a256a984e864b4ba0c14a542420796322cf4769ff1551" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.580903 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/acc683d2-9743-4518-a3b3-63d42dbaf522-utilities\") pod \"redhat-operators-qvjv5\" (UID: \"acc683d2-9743-4518-a3b3-63d42dbaf522\") " pod="openshift-marketplace/redhat-operators-qvjv5" Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.581014 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/acc683d2-9743-4518-a3b3-63d42dbaf522-catalog-content\") pod \"redhat-operators-qvjv5\" (UID: \"acc683d2-9743-4518-a3b3-63d42dbaf522\") " pod="openshift-marketplace/redhat-operators-qvjv5" Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.581462 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v94g2\" (UniqueName: \"kubernetes.io/projected/acc683d2-9743-4518-a3b3-63d42dbaf522-kube-api-access-v94g2\") pod \"redhat-operators-qvjv5\" (UID: \"acc683d2-9743-4518-a3b3-63d42dbaf522\") " pod="openshift-marketplace/redhat-operators-qvjv5" Dec 08 21:19:31 crc kubenswrapper[4791]: E1208 21:19:31.586147 4791 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="809ab81c5459ff31fc8a256a984e864b4ba0c14a542420796322cf4769ff1551" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 08 21:19:31 crc kubenswrapper[4791]: E1208 21:19:31.590038 4791 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="809ab81c5459ff31fc8a256a984e864b4ba0c14a542420796322cf4769ff1551" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 08 21:19:31 crc kubenswrapper[4791]: E1208 21:19:31.590094 4791 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-2kt8c" podUID="848a44d0-a0f4-48c2-9e4a-f0a4d3329815" containerName="kube-multus-additional-cni-plugins" Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.612782 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-g287b"] Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.682868 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v94g2\" (UniqueName: \"kubernetes.io/projected/acc683d2-9743-4518-a3b3-63d42dbaf522-kube-api-access-v94g2\") pod \"redhat-operators-qvjv5\" (UID: 
\"acc683d2-9743-4518-a3b3-63d42dbaf522\") " pod="openshift-marketplace/redhat-operators-qvjv5" Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.683352 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/acc683d2-9743-4518-a3b3-63d42dbaf522-utilities\") pod \"redhat-operators-qvjv5\" (UID: \"acc683d2-9743-4518-a3b3-63d42dbaf522\") " pod="openshift-marketplace/redhat-operators-qvjv5" Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.683395 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/acc683d2-9743-4518-a3b3-63d42dbaf522-catalog-content\") pod \"redhat-operators-qvjv5\" (UID: \"acc683d2-9743-4518-a3b3-63d42dbaf522\") " pod="openshift-marketplace/redhat-operators-qvjv5" Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.684014 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/acc683d2-9743-4518-a3b3-63d42dbaf522-utilities\") pod \"redhat-operators-qvjv5\" (UID: \"acc683d2-9743-4518-a3b3-63d42dbaf522\") " pod="openshift-marketplace/redhat-operators-qvjv5" Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.684193 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/acc683d2-9743-4518-a3b3-63d42dbaf522-catalog-content\") pod \"redhat-operators-qvjv5\" (UID: \"acc683d2-9743-4518-a3b3-63d42dbaf522\") " pod="openshift-marketplace/redhat-operators-qvjv5" Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.708917 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v94g2\" (UniqueName: \"kubernetes.io/projected/acc683d2-9743-4518-a3b3-63d42dbaf522-kube-api-access-v94g2\") pod \"redhat-operators-qvjv5\" (UID: \"acc683d2-9743-4518-a3b3-63d42dbaf522\") " pod="openshift-marketplace/redhat-operators-qvjv5" Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.783654 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.785611 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.787738 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-qvjv5" Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.788472 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.790223 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.799854 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.866549 4791 generic.go:334] "Generic (PLEG): container finished" podID="e3a99522-0cda-4894-8d9a-bc8aaa7763e3" containerID="41c0878d385be8f173879303f5a73c46b9774cfb9fb2da3f6e3710f0cf29448c" exitCode=0 Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.867640 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ck6br" event={"ID":"e3a99522-0cda-4894-8d9a-bc8aaa7763e3","Type":"ContainerDied","Data":"41c0878d385be8f173879303f5a73c46b9774cfb9fb2da3f6e3710f0cf29448c"} Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.886723 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9915efa2-cf01-4cba-b402-fc3e5167bca0-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"9915efa2-cf01-4cba-b402-fc3e5167bca0\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.887060 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9915efa2-cf01-4cba-b402-fc3e5167bca0-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"9915efa2-cf01-4cba-b402-fc3e5167bca0\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.911647 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g287b" event={"ID":"f4fae2f4-952d-43c7-b7a2-55c898273973","Type":"ContainerStarted","Data":"e45df30a5f4ecfaa1c617dc6bf010b56b23d054c91c993b064b67a7b613f61de"} Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.919029 4791 generic.go:334] "Generic (PLEG): container finished" podID="78dfcd05-fbfb-4b65-8d75-2fa345534b21" containerID="55eb5f51a54706d0f1a6bf85f1c213700d6bbd9f5191b525576e7a6e582d6b5c" exitCode=0 Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.919092 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4chcv" event={"ID":"78dfcd05-fbfb-4b65-8d75-2fa345534b21","Type":"ContainerDied","Data":"55eb5f51a54706d0f1a6bf85f1c213700d6bbd9f5191b525576e7a6e582d6b5c"} Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.919128 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4chcv" event={"ID":"78dfcd05-fbfb-4b65-8d75-2fa345534b21","Type":"ContainerStarted","Data":"676297b3c859d13f9380dd8cdb6d60844361468e1ca5f53bd50133fc95c31a0b"} Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.988622 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9915efa2-cf01-4cba-b402-fc3e5167bca0-kubelet-dir\") pod \"revision-pruner-9-crc\" 
(UID: \"9915efa2-cf01-4cba-b402-fc3e5167bca0\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.988727 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9915efa2-cf01-4cba-b402-fc3e5167bca0-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"9915efa2-cf01-4cba-b402-fc3e5167bca0\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 08 21:19:31 crc kubenswrapper[4791]: I1208 21:19:31.989072 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9915efa2-cf01-4cba-b402-fc3e5167bca0-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"9915efa2-cf01-4cba-b402-fc3e5167bca0\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 08 21:19:32 crc kubenswrapper[4791]: I1208 21:19:32.021084 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9915efa2-cf01-4cba-b402-fc3e5167bca0-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"9915efa2-cf01-4cba-b402-fc3e5167bca0\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 08 21:19:32 crc kubenswrapper[4791]: I1208 21:19:32.109954 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 08 21:19:32 crc kubenswrapper[4791]: I1208 21:19:32.134863 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qvjv5"] Dec 08 21:19:32 crc kubenswrapper[4791]: W1208 21:19:32.165356 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podacc683d2_9743_4518_a3b3_63d42dbaf522.slice/crio-23e41b167e9610592bcdefd905a51c523fecacb092e52b2d20afb9065176e52e WatchSource:0}: Error finding container 23e41b167e9610592bcdefd905a51c523fecacb092e52b2d20afb9065176e52e: Status 404 returned error can't find the container with id 23e41b167e9610592bcdefd905a51c523fecacb092e52b2d20afb9065176e52e Dec 08 21:19:32 crc kubenswrapper[4791]: I1208 21:19:32.391983 4791 patch_prober.go:28] interesting pod/router-default-5444994796-hs66g container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 08 21:19:32 crc kubenswrapper[4791]: [-]has-synced failed: reason withheld Dec 08 21:19:32 crc kubenswrapper[4791]: [+]process-running ok Dec 08 21:19:32 crc kubenswrapper[4791]: healthz check failed Dec 08 21:19:32 crc kubenswrapper[4791]: I1208 21:19:32.392547 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hs66g" podUID="4a64d896-f396-4347-9e7a-091e9741b884" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 08 21:19:32 crc kubenswrapper[4791]: I1208 21:19:32.487251 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 08 21:19:32 crc kubenswrapper[4791]: W1208 21:19:32.528142 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod9915efa2_cf01_4cba_b402_fc3e5167bca0.slice/crio-e63d19c063c448ac3838767418745cb318cc4e34368dca1b29c36ba0be485386 WatchSource:0}: Error finding container 
e63d19c063c448ac3838767418745cb318cc4e34368dca1b29c36ba0be485386: Status 404 returned error can't find the container with id e63d19c063c448ac3838767418745cb318cc4e34368dca1b29c36ba0be485386 Dec 08 21:19:32 crc kubenswrapper[4791]: I1208 21:19:32.940778 4791 generic.go:334] "Generic (PLEG): container finished" podID="acc683d2-9743-4518-a3b3-63d42dbaf522" containerID="6d90f76047d3869e5ba99ca00e06283d467a4c81b7359f81e9d3a48a43439e7f" exitCode=0 Dec 08 21:19:32 crc kubenswrapper[4791]: I1208 21:19:32.941008 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qvjv5" event={"ID":"acc683d2-9743-4518-a3b3-63d42dbaf522","Type":"ContainerDied","Data":"6d90f76047d3869e5ba99ca00e06283d467a4c81b7359f81e9d3a48a43439e7f"} Dec 08 21:19:32 crc kubenswrapper[4791]: I1208 21:19:32.941051 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qvjv5" event={"ID":"acc683d2-9743-4518-a3b3-63d42dbaf522","Type":"ContainerStarted","Data":"23e41b167e9610592bcdefd905a51c523fecacb092e52b2d20afb9065176e52e"} Dec 08 21:19:32 crc kubenswrapper[4791]: I1208 21:19:32.947243 4791 generic.go:334] "Generic (PLEG): container finished" podID="f4fae2f4-952d-43c7-b7a2-55c898273973" containerID="f358533d044f599cab97b460fad5940e9c0f64873e44dc3248ff8e5552ce968b" exitCode=0 Dec 08 21:19:32 crc kubenswrapper[4791]: I1208 21:19:32.947389 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g287b" event={"ID":"f4fae2f4-952d-43c7-b7a2-55c898273973","Type":"ContainerDied","Data":"f358533d044f599cab97b460fad5940e9c0f64873e44dc3248ff8e5552ce968b"} Dec 08 21:19:32 crc kubenswrapper[4791]: I1208 21:19:32.957102 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"9915efa2-cf01-4cba-b402-fc3e5167bca0","Type":"ContainerStarted","Data":"e63d19c063c448ac3838767418745cb318cc4e34368dca1b29c36ba0be485386"} Dec 08 21:19:33 crc kubenswrapper[4791]: I1208 21:19:33.401420 4791 patch_prober.go:28] interesting pod/router-default-5444994796-hs66g container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 08 21:19:33 crc kubenswrapper[4791]: [-]has-synced failed: reason withheld Dec 08 21:19:33 crc kubenswrapper[4791]: [+]process-running ok Dec 08 21:19:33 crc kubenswrapper[4791]: healthz check failed Dec 08 21:19:33 crc kubenswrapper[4791]: I1208 21:19:33.402042 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hs66g" podUID="4a64d896-f396-4347-9e7a-091e9741b884" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 08 21:19:33 crc kubenswrapper[4791]: I1208 21:19:33.984961 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"9915efa2-cf01-4cba-b402-fc3e5167bca0","Type":"ContainerStarted","Data":"103907f80815c6699daa4e3bbfe55c421000c8c46e8ba3cd90aa0ecc2bd1096c"} Dec 08 21:19:34 crc kubenswrapper[4791]: I1208 21:19:34.008264 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=3.008237895 podStartE2EDuration="3.008237895s" podCreationTimestamp="2025-12-08 21:19:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 
00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:34.007563099 +0000 UTC m=+50.706321444" watchObservedRunningTime="2025-12-08 21:19:34.008237895 +0000 UTC m=+50.706996260" Dec 08 21:19:34 crc kubenswrapper[4791]: I1208 21:19:34.245359 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:34 crc kubenswrapper[4791]: I1208 21:19:34.245482 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:34 crc kubenswrapper[4791]: I1208 21:19:34.245537 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:34 crc kubenswrapper[4791]: I1208 21:19:34.245589 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:34 crc kubenswrapper[4791]: I1208 21:19:34.251700 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:34 crc kubenswrapper[4791]: I1208 21:19:34.256209 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 08 21:19:34 crc kubenswrapper[4791]: I1208 21:19:34.261443 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:34 crc kubenswrapper[4791]: I1208 21:19:34.263026 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 08 21:19:34 crc kubenswrapper[4791]: I1208 21:19:34.265375 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 08 21:19:34 crc kubenswrapper[4791]: I1208 21:19:34.266848 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 08 21:19:34 crc kubenswrapper[4791]: I1208 21:19:34.266990 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 08 21:19:34 crc kubenswrapper[4791]: I1208 21:19:34.287868 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:34 crc kubenswrapper[4791]: I1208 21:19:34.289454 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:34 crc kubenswrapper[4791]: I1208 21:19:34.317499 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:34 crc kubenswrapper[4791]: I1208 21:19:34.327439 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:34 crc kubenswrapper[4791]: I1208 21:19:34.347621 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/35be78c2-2ce7-4d0a-affe-a4e8a2016a49-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"35be78c2-2ce7-4d0a-affe-a4e8a2016a49\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 08 21:19:34 crc kubenswrapper[4791]: I1208 21:19:34.347671 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/35be78c2-2ce7-4d0a-affe-a4e8a2016a49-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"35be78c2-2ce7-4d0a-affe-a4e8a2016a49\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 08 21:19:34 crc kubenswrapper[4791]: I1208 21:19:34.354627 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:34 crc kubenswrapper[4791]: I1208 21:19:34.391636 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-hs66g" Dec 08 21:19:34 crc kubenswrapper[4791]: I1208 21:19:34.395391 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-hs66g" Dec 08 21:19:34 crc kubenswrapper[4791]: I1208 21:19:34.485455 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/35be78c2-2ce7-4d0a-affe-a4e8a2016a49-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"35be78c2-2ce7-4d0a-affe-a4e8a2016a49\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 08 21:19:34 crc kubenswrapper[4791]: I1208 21:19:34.485538 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/35be78c2-2ce7-4d0a-affe-a4e8a2016a49-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"35be78c2-2ce7-4d0a-affe-a4e8a2016a49\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 08 21:19:34 crc kubenswrapper[4791]: I1208 21:19:34.485958 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/35be78c2-2ce7-4d0a-affe-a4e8a2016a49-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"35be78c2-2ce7-4d0a-affe-a4e8a2016a49\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 08 21:19:34 crc kubenswrapper[4791]: I1208 21:19:34.518424 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/35be78c2-2ce7-4d0a-affe-a4e8a2016a49-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"35be78c2-2ce7-4d0a-affe-a4e8a2016a49\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 08 21:19:34 crc kubenswrapper[4791]: I1208 21:19:34.635960 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 08 21:19:35 crc kubenswrapper[4791]: I1208 21:19:35.046595 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"c9583c19a3f899a31c30394e0f54842bb991e2c76ea387b69f8f726090166e14"} Dec 08 21:19:35 crc kubenswrapper[4791]: I1208 21:19:35.056255 4791 generic.go:334] "Generic (PLEG): container finished" podID="9915efa2-cf01-4cba-b402-fc3e5167bca0" containerID="103907f80815c6699daa4e3bbfe55c421000c8c46e8ba3cd90aa0ecc2bd1096c" exitCode=0 Dec 08 21:19:35 crc kubenswrapper[4791]: I1208 21:19:35.056346 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"9915efa2-cf01-4cba-b402-fc3e5167bca0","Type":"ContainerDied","Data":"103907f80815c6699daa4e3bbfe55c421000c8c46e8ba3cd90aa0ecc2bd1096c"} Dec 08 21:19:35 crc kubenswrapper[4791]: W1208 21:19:35.060789 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-f3db6cc7183820ff9161c679e12996538598e22a7993d2cfa0ae1fd1ed3dc72a WatchSource:0}: Error finding container f3db6cc7183820ff9161c679e12996538598e22a7993d2cfa0ae1fd1ed3dc72a: Status 404 returned error can't find the container with id f3db6cc7183820ff9161c679e12996538598e22a7993d2cfa0ae1fd1ed3dc72a Dec 08 21:19:35 crc kubenswrapper[4791]: I1208 21:19:35.172150 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 08 21:19:35 crc kubenswrapper[4791]: W1208 21:19:35.191354 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b6479f0_333b_4a96_9adf_2099afdc2447.slice/crio-aee630dc17a1a84282f73cab9e046de244d7c63be787eb8ca6637aa2799347a6 WatchSource:0}: Error finding container aee630dc17a1a84282f73cab9e046de244d7c63be787eb8ca6637aa2799347a6: Status 404 returned error can't find the container with id aee630dc17a1a84282f73cab9e046de244d7c63be787eb8ca6637aa2799347a6 Dec 08 21:19:35 crc kubenswrapper[4791]: W1208 21:19:35.209814 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod35be78c2_2ce7_4d0a_affe_a4e8a2016a49.slice/crio-c2a7dc27d8043d7dc525386f6c4f6a8aebd6a309e48bcedec033e6664553dd74 WatchSource:0}: Error finding container c2a7dc27d8043d7dc525386f6c4f6a8aebd6a309e48bcedec033e6664553dd74: Status 404 returned error can't find the container with id c2a7dc27d8043d7dc525386f6c4f6a8aebd6a309e48bcedec033e6664553dd74 Dec 08 21:19:35 crc kubenswrapper[4791]: I1208 21:19:35.949619 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 08 21:19:35 crc kubenswrapper[4791]: I1208 21:19:35.976119 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 08 21:19:36 crc kubenswrapper[4791]: I1208 21:19:36.069796 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"794654c1405da1864538ab83bcf9294d782e1e45e567896780a03a83824ada00"} Dec 08 21:19:36 crc kubenswrapper[4791]: I1208 21:19:36.069858 4791 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"aee630dc17a1a84282f73cab9e046de244d7c63be787eb8ca6637aa2799347a6"} Dec 08 21:19:36 crc kubenswrapper[4791]: I1208 21:19:36.071166 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:36 crc kubenswrapper[4791]: I1208 21:19:36.084662 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"6852bdfea714ec5904875dbba3c838f1b30d3c98a90df2d840df7aa306fac991"} Dec 08 21:19:36 crc kubenswrapper[4791]: I1208 21:19:36.084795 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"f3db6cc7183820ff9161c679e12996538598e22a7993d2cfa0ae1fd1ed3dc72a"} Dec 08 21:19:36 crc kubenswrapper[4791]: I1208 21:19:36.091103 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"ea1687cd148e3dc6187cd47f99b737e84cd87cde8e5cc37d61f6d6bab8098a40"} Dec 08 21:19:36 crc kubenswrapper[4791]: I1208 21:19:36.096841 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"35be78c2-2ce7-4d0a-affe-a4e8a2016a49","Type":"ContainerStarted","Data":"c2a7dc27d8043d7dc525386f6c4f6a8aebd6a309e48bcedec033e6664553dd74"} Dec 08 21:19:36 crc kubenswrapper[4791]: I1208 21:19:36.116577 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=1.11655785 podStartE2EDuration="1.11655785s" podCreationTimestamp="2025-12-08 21:19:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:36.108761067 +0000 UTC m=+52.807519412" watchObservedRunningTime="2025-12-08 21:19:36.11655785 +0000 UTC m=+52.815316195" Dec 08 21:19:36 crc kubenswrapper[4791]: I1208 21:19:36.465127 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 08 21:19:36 crc kubenswrapper[4791]: I1208 21:19:36.642380 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9915efa2-cf01-4cba-b402-fc3e5167bca0-kubelet-dir\") pod \"9915efa2-cf01-4cba-b402-fc3e5167bca0\" (UID: \"9915efa2-cf01-4cba-b402-fc3e5167bca0\") " Dec 08 21:19:36 crc kubenswrapper[4791]: I1208 21:19:36.643063 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9915efa2-cf01-4cba-b402-fc3e5167bca0-kube-api-access\") pod \"9915efa2-cf01-4cba-b402-fc3e5167bca0\" (UID: \"9915efa2-cf01-4cba-b402-fc3e5167bca0\") " Dec 08 21:19:36 crc kubenswrapper[4791]: I1208 21:19:36.642678 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9915efa2-cf01-4cba-b402-fc3e5167bca0-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "9915efa2-cf01-4cba-b402-fc3e5167bca0" (UID: "9915efa2-cf01-4cba-b402-fc3e5167bca0"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:19:36 crc kubenswrapper[4791]: I1208 21:19:36.744602 4791 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9915efa2-cf01-4cba-b402-fc3e5167bca0-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:36 crc kubenswrapper[4791]: I1208 21:19:36.855535 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9915efa2-cf01-4cba-b402-fc3e5167bca0-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "9915efa2-cf01-4cba-b402-fc3e5167bca0" (UID: "9915efa2-cf01-4cba-b402-fc3e5167bca0"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:36 crc kubenswrapper[4791]: I1208 21:19:36.900096 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-7f84n" Dec 08 21:19:36 crc kubenswrapper[4791]: I1208 21:19:36.948866 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9915efa2-cf01-4cba-b402-fc3e5167bca0-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:37 crc kubenswrapper[4791]: I1208 21:19:37.136004 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"35be78c2-2ce7-4d0a-affe-a4e8a2016a49","Type":"ContainerStarted","Data":"adcd3491fa0191676a9a56126dccd0b5ea8947a3f7ead47de5d8e966dcb4181a"} Dec 08 21:19:37 crc kubenswrapper[4791]: I1208 21:19:37.148467 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"9915efa2-cf01-4cba-b402-fc3e5167bca0","Type":"ContainerDied","Data":"e63d19c063c448ac3838767418745cb318cc4e34368dca1b29c36ba0be485386"} Dec 08 21:19:37 crc kubenswrapper[4791]: I1208 21:19:37.148517 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e63d19c063c448ac3838767418745cb318cc4e34368dca1b29c36ba0be485386" Dec 08 21:19:37 crc kubenswrapper[4791]: I1208 21:19:37.148577 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 08 21:19:38 crc kubenswrapper[4791]: I1208 21:19:38.158052 4791 generic.go:334] "Generic (PLEG): container finished" podID="35be78c2-2ce7-4d0a-affe-a4e8a2016a49" containerID="adcd3491fa0191676a9a56126dccd0b5ea8947a3f7ead47de5d8e966dcb4181a" exitCode=0 Dec 08 21:19:38 crc kubenswrapper[4791]: I1208 21:19:38.158120 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"35be78c2-2ce7-4d0a-affe-a4e8a2016a49","Type":"ContainerDied","Data":"adcd3491fa0191676a9a56126dccd0b5ea8947a3f7ead47de5d8e966dcb4181a"} Dec 08 21:19:38 crc kubenswrapper[4791]: I1208 21:19:38.934095 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 08 21:19:39 crc kubenswrapper[4791]: I1208 21:19:39.104810 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/35be78c2-2ce7-4d0a-affe-a4e8a2016a49-kubelet-dir\") pod \"35be78c2-2ce7-4d0a-affe-a4e8a2016a49\" (UID: \"35be78c2-2ce7-4d0a-affe-a4e8a2016a49\") " Dec 08 21:19:39 crc kubenswrapper[4791]: I1208 21:19:39.104919 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/35be78c2-2ce7-4d0a-affe-a4e8a2016a49-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "35be78c2-2ce7-4d0a-affe-a4e8a2016a49" (UID: "35be78c2-2ce7-4d0a-affe-a4e8a2016a49"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:19:39 crc kubenswrapper[4791]: I1208 21:19:39.105495 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/35be78c2-2ce7-4d0a-affe-a4e8a2016a49-kube-api-access\") pod \"35be78c2-2ce7-4d0a-affe-a4e8a2016a49\" (UID: \"35be78c2-2ce7-4d0a-affe-a4e8a2016a49\") " Dec 08 21:19:39 crc kubenswrapper[4791]: I1208 21:19:39.106247 4791 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/35be78c2-2ce7-4d0a-affe-a4e8a2016a49-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:39 crc kubenswrapper[4791]: I1208 21:19:39.111453 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35be78c2-2ce7-4d0a-affe-a4e8a2016a49-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "35be78c2-2ce7-4d0a-affe-a4e8a2016a49" (UID: "35be78c2-2ce7-4d0a-affe-a4e8a2016a49"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:19:39 crc kubenswrapper[4791]: I1208 21:19:39.197346 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"35be78c2-2ce7-4d0a-affe-a4e8a2016a49","Type":"ContainerDied","Data":"c2a7dc27d8043d7dc525386f6c4f6a8aebd6a309e48bcedec033e6664553dd74"} Dec 08 21:19:39 crc kubenswrapper[4791]: I1208 21:19:39.197424 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c2a7dc27d8043d7dc525386f6c4f6a8aebd6a309e48bcedec033e6664553dd74" Dec 08 21:19:39 crc kubenswrapper[4791]: I1208 21:19:39.197591 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 08 21:19:39 crc kubenswrapper[4791]: I1208 21:19:39.208462 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/35be78c2-2ce7-4d0a-affe-a4e8a2016a49-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 08 21:19:39 crc kubenswrapper[4791]: I1208 21:19:39.616167 4791 patch_prober.go:28] interesting pod/console-f9d7485db-vck6k container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.13:8443/health\": dial tcp 10.217.0.13:8443: connect: connection refused" start-of-body= Dec 08 21:19:39 crc kubenswrapper[4791]: I1208 21:19:39.616265 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-vck6k" podUID="959a8eb5-7d1a-4e10-bfc4-c23a1223d38d" containerName="console" probeResult="failure" output="Get \"https://10.217.0.13:8443/health\": dial tcp 10.217.0.13:8443: connect: connection refused" Dec 08 21:19:41 crc kubenswrapper[4791]: I1208 21:19:41.400189 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-wl6d7" Dec 08 21:19:41 crc kubenswrapper[4791]: E1208 21:19:41.592601 4791 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="809ab81c5459ff31fc8a256a984e864b4ba0c14a542420796322cf4769ff1551" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 08 21:19:41 crc kubenswrapper[4791]: E1208 21:19:41.596817 4791 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="809ab81c5459ff31fc8a256a984e864b4ba0c14a542420796322cf4769ff1551" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 08 21:19:41 crc kubenswrapper[4791]: E1208 21:19:41.612655 4791 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="809ab81c5459ff31fc8a256a984e864b4ba0c14a542420796322cf4769ff1551" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 08 21:19:41 crc kubenswrapper[4791]: E1208 21:19:41.612743 4791 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-2kt8c" podUID="848a44d0-a0f4-48c2-9e4a-f0a4d3329815" containerName="kube-multus-additional-cni-plugins" Dec 08 21:19:44 crc kubenswrapper[4791]: I1208 21:19:44.370084 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:19:46 crc kubenswrapper[4791]: I1208 21:19:46.616736 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 08 21:19:49 crc kubenswrapper[4791]: I1208 21:19:49.138367 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:19:49 crc kubenswrapper[4791]: I1208 21:19:49.172917 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=3.172901343 podStartE2EDuration="3.172901343s" podCreationTimestamp="2025-12-08 21:19:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:19:49.171001918 +0000 UTC m=+65.869760263" watchObservedRunningTime="2025-12-08 21:19:49.172901343 +0000 UTC m=+65.871659688" Dec 08 21:19:49 crc kubenswrapper[4791]: I1208 21:19:49.619122 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-vck6k" Dec 08 21:19:49 crc kubenswrapper[4791]: I1208 21:19:49.622339 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-vck6k" Dec 08 21:19:51 crc kubenswrapper[4791]: E1208 21:19:51.578300 4791 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="809ab81c5459ff31fc8a256a984e864b4ba0c14a542420796322cf4769ff1551" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 08 21:19:51 crc kubenswrapper[4791]: E1208 21:19:51.580171 4791 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="809ab81c5459ff31fc8a256a984e864b4ba0c14a542420796322cf4769ff1551" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 08 21:19:51 crc kubenswrapper[4791]: E1208 21:19:51.582126 4791 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="809ab81c5459ff31fc8a256a984e864b4ba0c14a542420796322cf4769ff1551" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 08 21:19:51 crc kubenswrapper[4791]: E1208 21:19:51.582183 4791 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-2kt8c" podUID="848a44d0-a0f4-48c2-9e4a-f0a4d3329815" containerName="kube-multus-additional-cni-plugins" Dec 08 21:19:58 crc kubenswrapper[4791]: I1208 21:19:58.319655 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_cni-sysctl-allowlist-ds-2kt8c_848a44d0-a0f4-48c2-9e4a-f0a4d3329815/kube-multus-additional-cni-plugins/0.log" Dec 08 21:19:58 crc kubenswrapper[4791]: I1208 21:19:58.320138 4791 generic.go:334] "Generic (PLEG): container finished" podID="848a44d0-a0f4-48c2-9e4a-f0a4d3329815" containerID="809ab81c5459ff31fc8a256a984e864b4ba0c14a542420796322cf4769ff1551" exitCode=137 Dec 08 21:19:58 crc kubenswrapper[4791]: I1208 21:19:58.320174 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/cni-sysctl-allowlist-ds-2kt8c" event={"ID":"848a44d0-a0f4-48c2-9e4a-f0a4d3329815","Type":"ContainerDied","Data":"809ab81c5459ff31fc8a256a984e864b4ba0c14a542420796322cf4769ff1551"} Dec 08 21:20:01 crc kubenswrapper[4791]: I1208 21:20:01.413118 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7dk77" Dec 08 21:20:01 crc kubenswrapper[4791]: E1208 21:20:01.575322 4791 log.go:32] "ExecSync cmd from runtime service 
failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 809ab81c5459ff31fc8a256a984e864b4ba0c14a542420796322cf4769ff1551 is running failed: container process not found" containerID="809ab81c5459ff31fc8a256a984e864b4ba0c14a542420796322cf4769ff1551" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 08 21:20:01 crc kubenswrapper[4791]: E1208 21:20:01.577346 4791 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 809ab81c5459ff31fc8a256a984e864b4ba0c14a542420796322cf4769ff1551 is running failed: container process not found" containerID="809ab81c5459ff31fc8a256a984e864b4ba0c14a542420796322cf4769ff1551" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 08 21:20:01 crc kubenswrapper[4791]: E1208 21:20:01.577541 4791 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 809ab81c5459ff31fc8a256a984e864b4ba0c14a542420796322cf4769ff1551 is running failed: container process not found" containerID="809ab81c5459ff31fc8a256a984e864b4ba0c14a542420796322cf4769ff1551" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 08 21:20:01 crc kubenswrapper[4791]: E1208 21:20:01.577568 4791 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 809ab81c5459ff31fc8a256a984e864b4ba0c14a542420796322cf4769ff1551 is running failed: container process not found" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-2kt8c" podUID="848a44d0-a0f4-48c2-9e4a-f0a4d3329815" containerName="kube-multus-additional-cni-plugins" Dec 08 21:20:04 crc kubenswrapper[4791]: E1208 21:20:04.506937 4791 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 08 21:20:04 crc kubenswrapper[4791]: E1208 21:20:04.507123 4791 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kct8z,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-4chcv_openshift-marketplace(78dfcd05-fbfb-4b65-8d75-2fa345534b21): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 08 21:20:04 crc kubenswrapper[4791]: E1208 21:20:04.509223 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-4chcv" podUID="78dfcd05-fbfb-4b65-8d75-2fa345534b21" Dec 08 21:20:07 crc kubenswrapper[4791]: E1208 21:20:07.581333 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-4chcv" podUID="78dfcd05-fbfb-4b65-8d75-2fa345534b21" Dec 08 21:20:07 crc kubenswrapper[4791]: I1208 21:20:07.666468 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 08 21:20:07 crc kubenswrapper[4791]: E1208 21:20:07.666696 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9915efa2-cf01-4cba-b402-fc3e5167bca0" containerName="pruner" Dec 08 21:20:07 crc kubenswrapper[4791]: I1208 21:20:07.666726 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="9915efa2-cf01-4cba-b402-fc3e5167bca0" containerName="pruner" Dec 08 21:20:07 crc kubenswrapper[4791]: E1208 21:20:07.666745 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35be78c2-2ce7-4d0a-affe-a4e8a2016a49" containerName="pruner" Dec 08 21:20:07 crc kubenswrapper[4791]: I1208 21:20:07.666753 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="35be78c2-2ce7-4d0a-affe-a4e8a2016a49" containerName="pruner" Dec 08 21:20:07 crc kubenswrapper[4791]: I1208 21:20:07.666876 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="9915efa2-cf01-4cba-b402-fc3e5167bca0" containerName="pruner" Dec 08 21:20:07 crc kubenswrapper[4791]: I1208 21:20:07.666892 4791 
memory_manager.go:354] "RemoveStaleState removing state" podUID="35be78c2-2ce7-4d0a-affe-a4e8a2016a49" containerName="pruner" Dec 08 21:20:07 crc kubenswrapper[4791]: I1208 21:20:07.667323 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 08 21:20:07 crc kubenswrapper[4791]: I1208 21:20:07.669860 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 08 21:20:07 crc kubenswrapper[4791]: I1208 21:20:07.670051 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 08 21:20:07 crc kubenswrapper[4791]: I1208 21:20:07.676763 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 08 21:20:07 crc kubenswrapper[4791]: I1208 21:20:07.713609 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0209b505-0086-44ec-b1e2-bc0bad570988-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"0209b505-0086-44ec-b1e2-bc0bad570988\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 08 21:20:07 crc kubenswrapper[4791]: I1208 21:20:07.713668 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0209b505-0086-44ec-b1e2-bc0bad570988-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"0209b505-0086-44ec-b1e2-bc0bad570988\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 08 21:20:07 crc kubenswrapper[4791]: I1208 21:20:07.814666 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0209b505-0086-44ec-b1e2-bc0bad570988-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"0209b505-0086-44ec-b1e2-bc0bad570988\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 08 21:20:07 crc kubenswrapper[4791]: I1208 21:20:07.814779 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0209b505-0086-44ec-b1e2-bc0bad570988-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"0209b505-0086-44ec-b1e2-bc0bad570988\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 08 21:20:07 crc kubenswrapper[4791]: I1208 21:20:07.814849 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0209b505-0086-44ec-b1e2-bc0bad570988-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"0209b505-0086-44ec-b1e2-bc0bad570988\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 08 21:20:07 crc kubenswrapper[4791]: I1208 21:20:07.834742 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0209b505-0086-44ec-b1e2-bc0bad570988-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"0209b505-0086-44ec-b1e2-bc0bad570988\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 08 21:20:07 crc kubenswrapper[4791]: I1208 21:20:07.991127 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 08 21:20:09 crc kubenswrapper[4791]: E1208 21:20:09.036365 4791 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 08 21:20:09 crc kubenswrapper[4791]: E1208 21:20:09.036823 4791 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vg8vg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-g287b_openshift-marketplace(f4fae2f4-952d-43c7-b7a2-55c898273973): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 08 21:20:09 crc kubenswrapper[4791]: E1208 21:20:09.038052 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-g287b" podUID="f4fae2f4-952d-43c7-b7a2-55c898273973" Dec 08 21:20:11 crc kubenswrapper[4791]: E1208 21:20:11.158876 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-g287b" podUID="f4fae2f4-952d-43c7-b7a2-55c898273973" Dec 08 21:20:11 crc kubenswrapper[4791]: E1208 21:20:11.576045 4791 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 809ab81c5459ff31fc8a256a984e864b4ba0c14a542420796322cf4769ff1551 is running failed: container process not found" containerID="809ab81c5459ff31fc8a256a984e864b4ba0c14a542420796322cf4769ff1551" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 08 21:20:11 crc 
kubenswrapper[4791]: E1208 21:20:11.576774 4791 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 809ab81c5459ff31fc8a256a984e864b4ba0c14a542420796322cf4769ff1551 is running failed: container process not found" containerID="809ab81c5459ff31fc8a256a984e864b4ba0c14a542420796322cf4769ff1551" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 08 21:20:11 crc kubenswrapper[4791]: E1208 21:20:11.577234 4791 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 809ab81c5459ff31fc8a256a984e864b4ba0c14a542420796322cf4769ff1551 is running failed: container process not found" containerID="809ab81c5459ff31fc8a256a984e864b4ba0c14a542420796322cf4769ff1551" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 08 21:20:11 crc kubenswrapper[4791]: E1208 21:20:11.577267 4791 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 809ab81c5459ff31fc8a256a984e864b4ba0c14a542420796322cf4769ff1551 is running failed: container process not found" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-2kt8c" podUID="848a44d0-a0f4-48c2-9e4a-f0a4d3329815" containerName="kube-multus-additional-cni-plugins" Dec 08 21:20:12 crc kubenswrapper[4791]: I1208 21:20:12.450386 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 08 21:20:12 crc kubenswrapper[4791]: I1208 21:20:12.451219 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 08 21:20:12 crc kubenswrapper[4791]: I1208 21:20:12.466269 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 08 21:20:12 crc kubenswrapper[4791]: I1208 21:20:12.571976 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/31175fa6-cfc1-4032-af15-d8f96bfc29c9-var-lock\") pod \"installer-9-crc\" (UID: \"31175fa6-cfc1-4032-af15-d8f96bfc29c9\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 08 21:20:12 crc kubenswrapper[4791]: I1208 21:20:12.572056 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/31175fa6-cfc1-4032-af15-d8f96bfc29c9-kubelet-dir\") pod \"installer-9-crc\" (UID: \"31175fa6-cfc1-4032-af15-d8f96bfc29c9\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 08 21:20:12 crc kubenswrapper[4791]: I1208 21:20:12.572078 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/31175fa6-cfc1-4032-af15-d8f96bfc29c9-kube-api-access\") pod \"installer-9-crc\" (UID: \"31175fa6-cfc1-4032-af15-d8f96bfc29c9\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 08 21:20:12 crc kubenswrapper[4791]: I1208 21:20:12.674912 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/31175fa6-cfc1-4032-af15-d8f96bfc29c9-kubelet-dir\") pod \"installer-9-crc\" (UID: \"31175fa6-cfc1-4032-af15-d8f96bfc29c9\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 08 21:20:12 crc kubenswrapper[4791]: I1208 21:20:12.674992 4791 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/31175fa6-cfc1-4032-af15-d8f96bfc29c9-kube-api-access\") pod \"installer-9-crc\" (UID: \"31175fa6-cfc1-4032-af15-d8f96bfc29c9\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 08 21:20:12 crc kubenswrapper[4791]: I1208 21:20:12.675117 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/31175fa6-cfc1-4032-af15-d8f96bfc29c9-kubelet-dir\") pod \"installer-9-crc\" (UID: \"31175fa6-cfc1-4032-af15-d8f96bfc29c9\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 08 21:20:12 crc kubenswrapper[4791]: I1208 21:20:12.676584 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/31175fa6-cfc1-4032-af15-d8f96bfc29c9-var-lock\") pod \"installer-9-crc\" (UID: \"31175fa6-cfc1-4032-af15-d8f96bfc29c9\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 08 21:20:12 crc kubenswrapper[4791]: I1208 21:20:12.676681 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/31175fa6-cfc1-4032-af15-d8f96bfc29c9-var-lock\") pod \"installer-9-crc\" (UID: \"31175fa6-cfc1-4032-af15-d8f96bfc29c9\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 08 21:20:12 crc kubenswrapper[4791]: I1208 21:20:12.695796 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/31175fa6-cfc1-4032-af15-d8f96bfc29c9-kube-api-access\") pod \"installer-9-crc\" (UID: \"31175fa6-cfc1-4032-af15-d8f96bfc29c9\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 08 21:20:12 crc kubenswrapper[4791]: I1208 21:20:12.780425 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 08 21:20:14 crc kubenswrapper[4791]: E1208 21:20:14.104024 4791 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 08 21:20:14 crc kubenswrapper[4791]: E1208 21:20:14.104190 4791 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-n9t5z,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-dgxzp_openshift-marketplace(102d168e-2cef-47ee-8911-1724e0d1982d): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 08 21:20:14 crc kubenswrapper[4791]: E1208 21:20:14.105582 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-dgxzp" podUID="102d168e-2cef-47ee-8911-1724e0d1982d" Dec 08 21:20:14 crc kubenswrapper[4791]: E1208 21:20:14.171654 4791 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 08 21:20:14 crc kubenswrapper[4791]: E1208 21:20:14.171859 4791 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dls9d,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-mbbd4_openshift-marketplace(175a64fd-0187-4d28-87f1-76194cac1bf2): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 08 21:20:14 crc kubenswrapper[4791]: E1208 21:20:14.173025 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-mbbd4" podUID="175a64fd-0187-4d28-87f1-76194cac1bf2" Dec 08 21:20:14 crc kubenswrapper[4791]: I1208 21:20:14.322149 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:20:15 crc kubenswrapper[4791]: E1208 21:20:15.302629 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-dgxzp" podUID="102d168e-2cef-47ee-8911-1724e0d1982d" Dec 08 21:20:15 crc kubenswrapper[4791]: E1208 21:20:15.302658 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-mbbd4" podUID="175a64fd-0187-4d28-87f1-76194cac1bf2" Dec 08 21:20:15 crc kubenswrapper[4791]: I1208 21:20:15.376407 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_cni-sysctl-allowlist-ds-2kt8c_848a44d0-a0f4-48c2-9e4a-f0a4d3329815/kube-multus-additional-cni-plugins/0.log" Dec 08 21:20:15 crc kubenswrapper[4791]: I1208 21:20:15.376479 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/cni-sysctl-allowlist-ds-2kt8c" Dec 08 21:20:15 crc kubenswrapper[4791]: E1208 21:20:15.402745 4791 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 08 21:20:15 crc kubenswrapper[4791]: E1208 21:20:15.402928 4791 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qcngh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-npdcj_openshift-marketplace(0decf941-a6e4-485f-afd4-7972d332952a): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 08 21:20:15 crc kubenswrapper[4791]: E1208 21:20:15.404212 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-npdcj" podUID="0decf941-a6e4-485f-afd4-7972d332952a" Dec 08 21:20:15 crc kubenswrapper[4791]: I1208 21:20:15.413153 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_cni-sysctl-allowlist-ds-2kt8c_848a44d0-a0f4-48c2-9e4a-f0a4d3329815/kube-multus-additional-cni-plugins/0.log" Dec 08 21:20:15 crc kubenswrapper[4791]: I1208 21:20:15.413791 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/cni-sysctl-allowlist-ds-2kt8c" event={"ID":"848a44d0-a0f4-48c2-9e4a-f0a4d3329815","Type":"ContainerDied","Data":"4b64e78bcd4908bd1a813eb1d3bc6bff6c8a67fbfa59fcc4a60dbbdb07f6f166"} Dec 08 21:20:15 crc kubenswrapper[4791]: I1208 21:20:15.413812 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/cni-sysctl-allowlist-ds-2kt8c" Dec 08 21:20:15 crc kubenswrapper[4791]: I1208 21:20:15.413883 4791 scope.go:117] "RemoveContainer" containerID="809ab81c5459ff31fc8a256a984e864b4ba0c14a542420796322cf4769ff1551" Dec 08 21:20:15 crc kubenswrapper[4791]: E1208 21:20:15.414794 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-npdcj" podUID="0decf941-a6e4-485f-afd4-7972d332952a" Dec 08 21:20:15 crc kubenswrapper[4791]: E1208 21:20:15.447860 4791 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 08 21:20:15 crc kubenswrapper[4791]: E1208 21:20:15.448083 4791 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-v94g2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-qvjv5_openshift-marketplace(acc683d2-9743-4518-a3b3-63d42dbaf522): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 08 21:20:15 crc kubenswrapper[4791]: E1208 21:20:15.449264 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-qvjv5" podUID="acc683d2-9743-4518-a3b3-63d42dbaf522" Dec 08 21:20:15 crc kubenswrapper[4791]: E1208 21:20:15.453702 4791 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" 
image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 08 21:20:15 crc kubenswrapper[4791]: E1208 21:20:15.454460 4791 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bmjg5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-xfw54_openshift-marketplace(85644602-2976-45a0-a2ae-f324c48d3ed5): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 08 21:20:15 crc kubenswrapper[4791]: E1208 21:20:15.455904 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-xfw54" podUID="85644602-2976-45a0-a2ae-f324c48d3ed5" Dec 08 21:20:15 crc kubenswrapper[4791]: I1208 21:20:15.517206 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/848a44d0-a0f4-48c2-9e4a-f0a4d3329815-cni-sysctl-allowlist\") pod \"848a44d0-a0f4-48c2-9e4a-f0a4d3329815\" (UID: \"848a44d0-a0f4-48c2-9e4a-f0a4d3329815\") " Dec 08 21:20:15 crc kubenswrapper[4791]: I1208 21:20:15.517267 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-khl4h\" (UniqueName: \"kubernetes.io/projected/848a44d0-a0f4-48c2-9e4a-f0a4d3329815-kube-api-access-khl4h\") pod \"848a44d0-a0f4-48c2-9e4a-f0a4d3329815\" (UID: \"848a44d0-a0f4-48c2-9e4a-f0a4d3329815\") " Dec 08 21:20:15 crc kubenswrapper[4791]: I1208 21:20:15.517285 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/848a44d0-a0f4-48c2-9e4a-f0a4d3329815-tuning-conf-dir\") pod \"848a44d0-a0f4-48c2-9e4a-f0a4d3329815\" (UID: \"848a44d0-a0f4-48c2-9e4a-f0a4d3329815\") " Dec 08 21:20:15 crc kubenswrapper[4791]: I1208 21:20:15.517319 4791 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ready\" (UniqueName: \"kubernetes.io/empty-dir/848a44d0-a0f4-48c2-9e4a-f0a4d3329815-ready\") pod \"848a44d0-a0f4-48c2-9e4a-f0a4d3329815\" (UID: \"848a44d0-a0f4-48c2-9e4a-f0a4d3329815\") " Dec 08 21:20:15 crc kubenswrapper[4791]: I1208 21:20:15.517412 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/848a44d0-a0f4-48c2-9e4a-f0a4d3329815-tuning-conf-dir" (OuterVolumeSpecName: "tuning-conf-dir") pod "848a44d0-a0f4-48c2-9e4a-f0a4d3329815" (UID: "848a44d0-a0f4-48c2-9e4a-f0a4d3329815"). InnerVolumeSpecName "tuning-conf-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:20:15 crc kubenswrapper[4791]: I1208 21:20:15.517669 4791 reconciler_common.go:293] "Volume detached for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/848a44d0-a0f4-48c2-9e4a-f0a4d3329815-tuning-conf-dir\") on node \"crc\" DevicePath \"\"" Dec 08 21:20:15 crc kubenswrapper[4791]: I1208 21:20:15.518213 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/848a44d0-a0f4-48c2-9e4a-f0a4d3329815-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "848a44d0-a0f4-48c2-9e4a-f0a4d3329815" (UID: "848a44d0-a0f4-48c2-9e4a-f0a4d3329815"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:20:15 crc kubenswrapper[4791]: I1208 21:20:15.518269 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/848a44d0-a0f4-48c2-9e4a-f0a4d3329815-ready" (OuterVolumeSpecName: "ready") pod "848a44d0-a0f4-48c2-9e4a-f0a4d3329815" (UID: "848a44d0-a0f4-48c2-9e4a-f0a4d3329815"). InnerVolumeSpecName "ready". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:20:15 crc kubenswrapper[4791]: I1208 21:20:15.527344 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/848a44d0-a0f4-48c2-9e4a-f0a4d3329815-kube-api-access-khl4h" (OuterVolumeSpecName: "kube-api-access-khl4h") pod "848a44d0-a0f4-48c2-9e4a-f0a4d3329815" (UID: "848a44d0-a0f4-48c2-9e4a-f0a4d3329815"). InnerVolumeSpecName "kube-api-access-khl4h". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:20:15 crc kubenswrapper[4791]: I1208 21:20:15.622395 4791 reconciler_common.go:293] "Volume detached for volume \"ready\" (UniqueName: \"kubernetes.io/empty-dir/848a44d0-a0f4-48c2-9e4a-f0a4d3329815-ready\") on node \"crc\" DevicePath \"\"" Dec 08 21:20:15 crc kubenswrapper[4791]: I1208 21:20:15.622693 4791 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/848a44d0-a0f4-48c2-9e4a-f0a4d3329815-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 08 21:20:15 crc kubenswrapper[4791]: I1208 21:20:15.622720 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-khl4h\" (UniqueName: \"kubernetes.io/projected/848a44d0-a0f4-48c2-9e4a-f0a4d3329815-kube-api-access-khl4h\") on node \"crc\" DevicePath \"\"" Dec 08 21:20:15 crc kubenswrapper[4791]: I1208 21:20:15.628291 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 08 21:20:15 crc kubenswrapper[4791]: I1208 21:20:15.740640 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-multus/cni-sysctl-allowlist-ds-2kt8c"] Dec 08 21:20:15 crc kubenswrapper[4791]: I1208 21:20:15.743405 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-multus/cni-sysctl-allowlist-ds-2kt8c"] Dec 08 21:20:15 crc kubenswrapper[4791]: I1208 21:20:15.760049 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 08 21:20:16 crc kubenswrapper[4791]: I1208 21:20:16.423283 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"31175fa6-cfc1-4032-af15-d8f96bfc29c9","Type":"ContainerStarted","Data":"64499fd416c545ba3c245cb24f79477e9cd51db7b964a60fddd928d7b09377bc"} Dec 08 21:20:16 crc kubenswrapper[4791]: I1208 21:20:16.423532 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"31175fa6-cfc1-4032-af15-d8f96bfc29c9","Type":"ContainerStarted","Data":"fe1f29b09f204cb6eb64a0f9eab6f3f14640bc6b68f454835e031c160ab29cf2"} Dec 08 21:20:16 crc kubenswrapper[4791]: I1208 21:20:16.428780 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"0209b505-0086-44ec-b1e2-bc0bad570988","Type":"ContainerStarted","Data":"24e1102ed2a769e52c4fafc9128a35b184b7126938b957a547e114f2782374dc"} Dec 08 21:20:16 crc kubenswrapper[4791]: I1208 21:20:16.429000 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"0209b505-0086-44ec-b1e2-bc0bad570988","Type":"ContainerStarted","Data":"810761754b4b83604d105348722ab1346fe0f295f74fc2325ae3fcd3b6e093f4"} Dec 08 21:20:16 crc kubenswrapper[4791]: I1208 21:20:16.430763 4791 generic.go:334] "Generic (PLEG): container finished" podID="e3a99522-0cda-4894-8d9a-bc8aaa7763e3" containerID="6386fac06704b56cce18f7e93445d8e7a59feb94966e3e6dfeb1bf52d9d13435" exitCode=0 Dec 08 21:20:16 crc kubenswrapper[4791]: I1208 21:20:16.430906 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ck6br" event={"ID":"e3a99522-0cda-4894-8d9a-bc8aaa7763e3","Type":"ContainerDied","Data":"6386fac06704b56cce18f7e93445d8e7a59feb94966e3e6dfeb1bf52d9d13435"} Dec 08 21:20:16 crc kubenswrapper[4791]: E1208 21:20:16.432410 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-qvjv5" podUID="acc683d2-9743-4518-a3b3-63d42dbaf522" Dec 08 21:20:16 crc kubenswrapper[4791]: E1208 21:20:16.432830 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-xfw54" podUID="85644602-2976-45a0-a2ae-f324c48d3ed5" Dec 08 21:20:16 crc kubenswrapper[4791]: I1208 21:20:16.442701 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=4.442683356 podStartE2EDuration="4.442683356s" podCreationTimestamp="2025-12-08 21:20:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:20:16.440778184 +0000 UTC m=+93.139536529" watchObservedRunningTime="2025-12-08 21:20:16.442683356 +0000 UTC m=+93.141441701" Dec 08 21:20:16 crc kubenswrapper[4791]: I1208 21:20:16.508972 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=9.508950644 podStartE2EDuration="9.508950644s" podCreationTimestamp="2025-12-08 21:20:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:20:16.497886299 +0000 UTC m=+93.196644654" watchObservedRunningTime="2025-12-08 21:20:16.508950644 +0000 UTC m=+93.207708989" Dec 08 21:20:17 crc kubenswrapper[4791]: I1208 21:20:17.437940 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ck6br" event={"ID":"e3a99522-0cda-4894-8d9a-bc8aaa7763e3","Type":"ContainerStarted","Data":"f37ff2d8945007128451f0ea660ea36b550c924d6985bd3540e6b54d26059ef2"} Dec 08 21:20:17 crc kubenswrapper[4791]: I1208 21:20:17.439458 4791 generic.go:334] "Generic (PLEG): container finished" podID="0209b505-0086-44ec-b1e2-bc0bad570988" containerID="24e1102ed2a769e52c4fafc9128a35b184b7126938b957a547e114f2782374dc" exitCode=0 Dec 08 21:20:17 crc kubenswrapper[4791]: I1208 21:20:17.439515 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"0209b505-0086-44ec-b1e2-bc0bad570988","Type":"ContainerDied","Data":"24e1102ed2a769e52c4fafc9128a35b184b7126938b957a547e114f2782374dc"} Dec 08 21:20:17 crc kubenswrapper[4791]: I1208 21:20:17.459577 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-ck6br" podStartSLOduration=3.4969688420000002 podStartE2EDuration="48.459559412s" podCreationTimestamp="2025-12-08 21:19:29 +0000 UTC" firstStartedPulling="2025-12-08 21:19:31.874001932 +0000 UTC m=+48.572760277" lastFinishedPulling="2025-12-08 21:20:16.836592502 +0000 UTC m=+93.535350847" observedRunningTime="2025-12-08 21:20:17.455802008 +0000 UTC m=+94.154560363" watchObservedRunningTime="2025-12-08 21:20:17.459559412 +0000 UTC m=+94.158317757" Dec 08 21:20:17 crc kubenswrapper[4791]: I1208 21:20:17.604257 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="848a44d0-a0f4-48c2-9e4a-f0a4d3329815" 
path="/var/lib/kubelet/pods/848a44d0-a0f4-48c2-9e4a-f0a4d3329815/volumes" Dec 08 21:20:18 crc kubenswrapper[4791]: I1208 21:20:18.734125 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 08 21:20:18 crc kubenswrapper[4791]: I1208 21:20:18.785329 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0209b505-0086-44ec-b1e2-bc0bad570988-kube-api-access\") pod \"0209b505-0086-44ec-b1e2-bc0bad570988\" (UID: \"0209b505-0086-44ec-b1e2-bc0bad570988\") " Dec 08 21:20:18 crc kubenswrapper[4791]: I1208 21:20:18.785475 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0209b505-0086-44ec-b1e2-bc0bad570988-kubelet-dir\") pod \"0209b505-0086-44ec-b1e2-bc0bad570988\" (UID: \"0209b505-0086-44ec-b1e2-bc0bad570988\") " Dec 08 21:20:18 crc kubenswrapper[4791]: I1208 21:20:18.785663 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0209b505-0086-44ec-b1e2-bc0bad570988-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "0209b505-0086-44ec-b1e2-bc0bad570988" (UID: "0209b505-0086-44ec-b1e2-bc0bad570988"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:20:18 crc kubenswrapper[4791]: I1208 21:20:18.887602 4791 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0209b505-0086-44ec-b1e2-bc0bad570988-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 08 21:20:19 crc kubenswrapper[4791]: I1208 21:20:19.144973 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0209b505-0086-44ec-b1e2-bc0bad570988-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0209b505-0086-44ec-b1e2-bc0bad570988" (UID: "0209b505-0086-44ec-b1e2-bc0bad570988"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:20:19 crc kubenswrapper[4791]: I1208 21:20:19.192058 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0209b505-0086-44ec-b1e2-bc0bad570988-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 08 21:20:19 crc kubenswrapper[4791]: I1208 21:20:19.450751 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"0209b505-0086-44ec-b1e2-bc0bad570988","Type":"ContainerDied","Data":"810761754b4b83604d105348722ab1346fe0f295f74fc2325ae3fcd3b6e093f4"} Dec 08 21:20:19 crc kubenswrapper[4791]: I1208 21:20:19.451052 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="810761754b4b83604d105348722ab1346fe0f295f74fc2325ae3fcd3b6e093f4" Dec 08 21:20:19 crc kubenswrapper[4791]: I1208 21:20:19.450822 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 08 21:20:20 crc kubenswrapper[4791]: I1208 21:20:20.161433 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-ck6br" Dec 08 21:20:20 crc kubenswrapper[4791]: I1208 21:20:20.161577 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-ck6br" Dec 08 21:20:20 crc kubenswrapper[4791]: I1208 21:20:20.232693 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-ck6br" Dec 08 21:20:21 crc kubenswrapper[4791]: I1208 21:20:21.500224 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-ck6br" Dec 08 21:20:23 crc kubenswrapper[4791]: I1208 21:20:23.473221 4791 generic.go:334] "Generic (PLEG): container finished" podID="78dfcd05-fbfb-4b65-8d75-2fa345534b21" containerID="c3b65d0eb862fe081bf55cf4c98afe44a02fee9d686d4997c76355084e28a73b" exitCode=0 Dec 08 21:20:23 crc kubenswrapper[4791]: I1208 21:20:23.473462 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4chcv" event={"ID":"78dfcd05-fbfb-4b65-8d75-2fa345534b21","Type":"ContainerDied","Data":"c3b65d0eb862fe081bf55cf4c98afe44a02fee9d686d4997c76355084e28a73b"} Dec 08 21:20:24 crc kubenswrapper[4791]: I1208 21:20:24.481233 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4chcv" event={"ID":"78dfcd05-fbfb-4b65-8d75-2fa345534b21","Type":"ContainerStarted","Data":"427daade66a9bb37168b4158dc65a4362da94e9058cf664a5e7cba1e8a70fe84"} Dec 08 21:20:25 crc kubenswrapper[4791]: I1208 21:20:25.618350 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-4chcv" podStartSLOduration=3.35407681 podStartE2EDuration="55.61832001s" podCreationTimestamp="2025-12-08 21:19:30 +0000 UTC" firstStartedPulling="2025-12-08 21:19:31.933844518 +0000 UTC m=+48.632602863" lastFinishedPulling="2025-12-08 21:20:24.198087708 +0000 UTC m=+100.896846063" observedRunningTime="2025-12-08 21:20:24.506908999 +0000 UTC m=+101.205667334" watchObservedRunningTime="2025-12-08 21:20:25.61832001 +0000 UTC m=+102.317078355" Dec 08 21:20:28 crc kubenswrapper[4791]: I1208 21:20:28.505175 4791 generic.go:334] "Generic (PLEG): container finished" podID="0decf941-a6e4-485f-afd4-7972d332952a" containerID="5b08c9d8990e49d9a7082c8f1372527c08e0452ad30a7aeac03cba5a05d5d863" exitCode=0 Dec 08 21:20:28 crc kubenswrapper[4791]: I1208 21:20:28.505270 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-npdcj" event={"ID":"0decf941-a6e4-485f-afd4-7972d332952a","Type":"ContainerDied","Data":"5b08c9d8990e49d9a7082c8f1372527c08e0452ad30a7aeac03cba5a05d5d863"} Dec 08 21:20:28 crc kubenswrapper[4791]: I1208 21:20:28.509613 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g287b" event={"ID":"f4fae2f4-952d-43c7-b7a2-55c898273973","Type":"ContainerDied","Data":"057716cd859d2fecd94372f1c30e482ce3e006bc4ff39a095314f9782b81253d"} Dec 08 21:20:28 crc kubenswrapper[4791]: I1208 21:20:28.509768 4791 generic.go:334] "Generic (PLEG): container finished" podID="f4fae2f4-952d-43c7-b7a2-55c898273973" containerID="057716cd859d2fecd94372f1c30e482ce3e006bc4ff39a095314f9782b81253d" exitCode=0 Dec 08 21:20:29 crc kubenswrapper[4791]: 
I1208 21:20:29.517105 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xfw54" event={"ID":"85644602-2976-45a0-a2ae-f324c48d3ed5","Type":"ContainerStarted","Data":"781e33c1f6625c5382aec157fba46b9b56849a5aab66b4b4669a9a119dc7d8d7"} Dec 08 21:20:29 crc kubenswrapper[4791]: I1208 21:20:29.521259 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g287b" event={"ID":"f4fae2f4-952d-43c7-b7a2-55c898273973","Type":"ContainerStarted","Data":"0c45b6c3589cbf6c00821b2a723921ccdf269c08d677ab5f1e48ce6bed5e36b1"} Dec 08 21:20:29 crc kubenswrapper[4791]: I1208 21:20:29.523575 4791 generic.go:334] "Generic (PLEG): container finished" podID="102d168e-2cef-47ee-8911-1724e0d1982d" containerID="2ec416b26ab9c9d86226ffe30e8af6b334ea7e1eeb5599f9dcbdcf89d208c27a" exitCode=0 Dec 08 21:20:29 crc kubenswrapper[4791]: I1208 21:20:29.523674 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dgxzp" event={"ID":"102d168e-2cef-47ee-8911-1724e0d1982d","Type":"ContainerDied","Data":"2ec416b26ab9c9d86226ffe30e8af6b334ea7e1eeb5599f9dcbdcf89d208c27a"} Dec 08 21:20:29 crc kubenswrapper[4791]: I1208 21:20:29.527496 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qvjv5" event={"ID":"acc683d2-9743-4518-a3b3-63d42dbaf522","Type":"ContainerStarted","Data":"07ddb93c39cde39faed9ba9b49276d0508a9d8711a0e0dfdef1474937fa7de83"} Dec 08 21:20:29 crc kubenswrapper[4791]: I1208 21:20:29.530001 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-npdcj" event={"ID":"0decf941-a6e4-485f-afd4-7972d332952a","Type":"ContainerStarted","Data":"b13f0a62f14238b27f87c6bbc7ea4f8b347d0e1f448030f2c478f7fa08d7d7a0"} Dec 08 21:20:29 crc kubenswrapper[4791]: I1208 21:20:29.579667 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-g287b" podStartSLOduration=2.40613283 podStartE2EDuration="58.579640204s" podCreationTimestamp="2025-12-08 21:19:31 +0000 UTC" firstStartedPulling="2025-12-08 21:19:32.952227735 +0000 UTC m=+49.650986080" lastFinishedPulling="2025-12-08 21:20:29.125735109 +0000 UTC m=+105.824493454" observedRunningTime="2025-12-08 21:20:29.576237609 +0000 UTC m=+106.274995944" watchObservedRunningTime="2025-12-08 21:20:29.579640204 +0000 UTC m=+106.278398549" Dec 08 21:20:29 crc kubenswrapper[4791]: I1208 21:20:29.609883 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-npdcj" podStartSLOduration=3.334864984 podStartE2EDuration="1m2.609861284s" podCreationTimestamp="2025-12-08 21:19:27 +0000 UTC" firstStartedPulling="2025-12-08 21:19:29.808409251 +0000 UTC m=+46.507167596" lastFinishedPulling="2025-12-08 21:20:29.083405551 +0000 UTC m=+105.782163896" observedRunningTime="2025-12-08 21:20:29.605485967 +0000 UTC m=+106.304244312" watchObservedRunningTime="2025-12-08 21:20:29.609861284 +0000 UTC m=+106.308619629" Dec 08 21:20:30 crc kubenswrapper[4791]: I1208 21:20:30.539514 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dgxzp" event={"ID":"102d168e-2cef-47ee-8911-1724e0d1982d","Type":"ContainerStarted","Data":"966787e2a2d7fd7db678dd7abf6c7d151ee7f743442764665fd356ca8cf19eab"} Dec 08 21:20:30 crc kubenswrapper[4791]: I1208 21:20:30.542516 4791 generic.go:334] "Generic (PLEG): container finished" 
podID="acc683d2-9743-4518-a3b3-63d42dbaf522" containerID="07ddb93c39cde39faed9ba9b49276d0508a9d8711a0e0dfdef1474937fa7de83" exitCode=0 Dec 08 21:20:30 crc kubenswrapper[4791]: I1208 21:20:30.542592 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qvjv5" event={"ID":"acc683d2-9743-4518-a3b3-63d42dbaf522","Type":"ContainerDied","Data":"07ddb93c39cde39faed9ba9b49276d0508a9d8711a0e0dfdef1474937fa7de83"} Dec 08 21:20:30 crc kubenswrapper[4791]: I1208 21:20:30.545954 4791 generic.go:334] "Generic (PLEG): container finished" podID="85644602-2976-45a0-a2ae-f324c48d3ed5" containerID="781e33c1f6625c5382aec157fba46b9b56849a5aab66b4b4669a9a119dc7d8d7" exitCode=0 Dec 08 21:20:30 crc kubenswrapper[4791]: I1208 21:20:30.546148 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xfw54" event={"ID":"85644602-2976-45a0-a2ae-f324c48d3ed5","Type":"ContainerDied","Data":"781e33c1f6625c5382aec157fba46b9b56849a5aab66b4b4669a9a119dc7d8d7"} Dec 08 21:20:30 crc kubenswrapper[4791]: I1208 21:20:30.552304 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-4chcv" Dec 08 21:20:30 crc kubenswrapper[4791]: I1208 21:20:30.552368 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-4chcv" Dec 08 21:20:30 crc kubenswrapper[4791]: I1208 21:20:30.569520 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-dgxzp" podStartSLOduration=2.395096968 podStartE2EDuration="1m2.569493732s" podCreationTimestamp="2025-12-08 21:19:28 +0000 UTC" firstStartedPulling="2025-12-08 21:19:29.800628149 +0000 UTC m=+46.499386494" lastFinishedPulling="2025-12-08 21:20:29.975024913 +0000 UTC m=+106.673783258" observedRunningTime="2025-12-08 21:20:30.56805507 +0000 UTC m=+107.266813435" watchObservedRunningTime="2025-12-08 21:20:30.569493732 +0000 UTC m=+107.268252077" Dec 08 21:20:30 crc kubenswrapper[4791]: I1208 21:20:30.606984 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-4chcv" Dec 08 21:20:31 crc kubenswrapper[4791]: I1208 21:20:31.352114 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-g287b" Dec 08 21:20:31 crc kubenswrapper[4791]: I1208 21:20:31.352174 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-g287b" Dec 08 21:20:31 crc kubenswrapper[4791]: I1208 21:20:31.555111 4791 generic.go:334] "Generic (PLEG): container finished" podID="175a64fd-0187-4d28-87f1-76194cac1bf2" containerID="92c1f96e0fab36b85069cb648bc43c934e0984c62caba0396b8244061fb7a923" exitCode=0 Dec 08 21:20:31 crc kubenswrapper[4791]: I1208 21:20:31.555159 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mbbd4" event={"ID":"175a64fd-0187-4d28-87f1-76194cac1bf2","Type":"ContainerDied","Data":"92c1f96e0fab36b85069cb648bc43c934e0984c62caba0396b8244061fb7a923"} Dec 08 21:20:31 crc kubenswrapper[4791]: I1208 21:20:31.604186 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-4chcv" Dec 08 21:20:32 crc kubenswrapper[4791]: I1208 21:20:32.395604 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-g287b" 
podUID="f4fae2f4-952d-43c7-b7a2-55c898273973" containerName="registry-server" probeResult="failure" output=< Dec 08 21:20:32 crc kubenswrapper[4791]: timeout: failed to connect service ":50051" within 1s Dec 08 21:20:32 crc kubenswrapper[4791]: > Dec 08 21:20:32 crc kubenswrapper[4791]: I1208 21:20:32.563791 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qvjv5" event={"ID":"acc683d2-9743-4518-a3b3-63d42dbaf522","Type":"ContainerStarted","Data":"c4e251d001f0bc32b28c744015beecda19e73bd42aefcda51cc2a383c764094a"} Dec 08 21:20:32 crc kubenswrapper[4791]: I1208 21:20:32.566962 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xfw54" event={"ID":"85644602-2976-45a0-a2ae-f324c48d3ed5","Type":"ContainerStarted","Data":"7fa25c3595fd64436aa1be537033e591c299ef5d2fd5577494feee05d94dfe75"} Dec 08 21:20:32 crc kubenswrapper[4791]: I1208 21:20:32.589849 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-qvjv5" podStartSLOduration=2.7790329209999998 podStartE2EDuration="1m1.589827728s" podCreationTimestamp="2025-12-08 21:19:31 +0000 UTC" firstStartedPulling="2025-12-08 21:19:32.944516724 +0000 UTC m=+49.643275069" lastFinishedPulling="2025-12-08 21:20:31.755311531 +0000 UTC m=+108.454069876" observedRunningTime="2025-12-08 21:20:32.589374868 +0000 UTC m=+109.288133223" watchObservedRunningTime="2025-12-08 21:20:32.589827728 +0000 UTC m=+109.288586073" Dec 08 21:20:32 crc kubenswrapper[4791]: I1208 21:20:32.614351 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-xfw54" podStartSLOduration=2.69112642 podStartE2EDuration="1m4.61431879s" podCreationTimestamp="2025-12-08 21:19:28 +0000 UTC" firstStartedPulling="2025-12-08 21:19:29.796342308 +0000 UTC m=+46.495100643" lastFinishedPulling="2025-12-08 21:20:31.719534668 +0000 UTC m=+108.418293013" observedRunningTime="2025-12-08 21:20:32.610490285 +0000 UTC m=+109.309248630" watchObservedRunningTime="2025-12-08 21:20:32.61431879 +0000 UTC m=+109.313077135" Dec 08 21:20:33 crc kubenswrapper[4791]: I1208 21:20:33.574677 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mbbd4" event={"ID":"175a64fd-0187-4d28-87f1-76194cac1bf2","Type":"ContainerStarted","Data":"c58f081ecf66fdacb92330b44f7c9641e832576bc3c29cebfe72777ed49582e7"} Dec 08 21:20:33 crc kubenswrapper[4791]: I1208 21:20:33.611978 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-mbbd4" podStartSLOduration=3.287840402 podStartE2EDuration="1m5.611951071s" podCreationTimestamp="2025-12-08 21:19:28 +0000 UTC" firstStartedPulling="2025-12-08 21:19:29.805881052 +0000 UTC m=+46.504639397" lastFinishedPulling="2025-12-08 21:20:32.129991721 +0000 UTC m=+108.828750066" observedRunningTime="2025-12-08 21:20:33.60426437 +0000 UTC m=+110.303022725" watchObservedRunningTime="2025-12-08 21:20:33.611951071 +0000 UTC m=+110.310709416" Dec 08 21:20:34 crc kubenswrapper[4791]: I1208 21:20:34.638328 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4chcv"] Dec 08 21:20:34 crc kubenswrapper[4791]: I1208 21:20:34.638971 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-4chcv" podUID="78dfcd05-fbfb-4b65-8d75-2fa345534b21" containerName="registry-server" 
containerID="cri-o://427daade66a9bb37168b4158dc65a4362da94e9058cf664a5e7cba1e8a70fe84" gracePeriod=2 Dec 08 21:20:35 crc kubenswrapper[4791]: I1208 21:20:35.589437 4791 generic.go:334] "Generic (PLEG): container finished" podID="78dfcd05-fbfb-4b65-8d75-2fa345534b21" containerID="427daade66a9bb37168b4158dc65a4362da94e9058cf664a5e7cba1e8a70fe84" exitCode=0 Dec 08 21:20:35 crc kubenswrapper[4791]: I1208 21:20:35.589514 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4chcv" event={"ID":"78dfcd05-fbfb-4b65-8d75-2fa345534b21","Type":"ContainerDied","Data":"427daade66a9bb37168b4158dc65a4362da94e9058cf664a5e7cba1e8a70fe84"} Dec 08 21:20:36 crc kubenswrapper[4791]: I1208 21:20:36.946865 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4chcv" Dec 08 21:20:37 crc kubenswrapper[4791]: I1208 21:20:37.036393 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78dfcd05-fbfb-4b65-8d75-2fa345534b21-catalog-content\") pod \"78dfcd05-fbfb-4b65-8d75-2fa345534b21\" (UID: \"78dfcd05-fbfb-4b65-8d75-2fa345534b21\") " Dec 08 21:20:37 crc kubenswrapper[4791]: I1208 21:20:37.037840 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78dfcd05-fbfb-4b65-8d75-2fa345534b21-utilities\") pod \"78dfcd05-fbfb-4b65-8d75-2fa345534b21\" (UID: \"78dfcd05-fbfb-4b65-8d75-2fa345534b21\") " Dec 08 21:20:37 crc kubenswrapper[4791]: I1208 21:20:37.037909 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kct8z\" (UniqueName: \"kubernetes.io/projected/78dfcd05-fbfb-4b65-8d75-2fa345534b21-kube-api-access-kct8z\") pod \"78dfcd05-fbfb-4b65-8d75-2fa345534b21\" (UID: \"78dfcd05-fbfb-4b65-8d75-2fa345534b21\") " Dec 08 21:20:37 crc kubenswrapper[4791]: I1208 21:20:37.038506 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78dfcd05-fbfb-4b65-8d75-2fa345534b21-utilities" (OuterVolumeSpecName: "utilities") pod "78dfcd05-fbfb-4b65-8d75-2fa345534b21" (UID: "78dfcd05-fbfb-4b65-8d75-2fa345534b21"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:20:37 crc kubenswrapper[4791]: I1208 21:20:37.044920 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78dfcd05-fbfb-4b65-8d75-2fa345534b21-kube-api-access-kct8z" (OuterVolumeSpecName: "kube-api-access-kct8z") pod "78dfcd05-fbfb-4b65-8d75-2fa345534b21" (UID: "78dfcd05-fbfb-4b65-8d75-2fa345534b21"). InnerVolumeSpecName "kube-api-access-kct8z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:20:37 crc kubenswrapper[4791]: I1208 21:20:37.060142 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78dfcd05-fbfb-4b65-8d75-2fa345534b21-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "78dfcd05-fbfb-4b65-8d75-2fa345534b21" (UID: "78dfcd05-fbfb-4b65-8d75-2fa345534b21"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:20:37 crc kubenswrapper[4791]: I1208 21:20:37.139636 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78dfcd05-fbfb-4b65-8d75-2fa345534b21-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 21:20:37 crc kubenswrapper[4791]: I1208 21:20:37.139679 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78dfcd05-fbfb-4b65-8d75-2fa345534b21-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 21:20:37 crc kubenswrapper[4791]: I1208 21:20:37.139689 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kct8z\" (UniqueName: \"kubernetes.io/projected/78dfcd05-fbfb-4b65-8d75-2fa345534b21-kube-api-access-kct8z\") on node \"crc\" DevicePath \"\"" Dec 08 21:20:37 crc kubenswrapper[4791]: I1208 21:20:37.615885 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4chcv" Dec 08 21:20:37 crc kubenswrapper[4791]: I1208 21:20:37.618035 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4chcv" event={"ID":"78dfcd05-fbfb-4b65-8d75-2fa345534b21","Type":"ContainerDied","Data":"676297b3c859d13f9380dd8cdb6d60844361468e1ca5f53bd50133fc95c31a0b"} Dec 08 21:20:37 crc kubenswrapper[4791]: I1208 21:20:37.618147 4791 scope.go:117] "RemoveContainer" containerID="427daade66a9bb37168b4158dc65a4362da94e9058cf664a5e7cba1e8a70fe84" Dec 08 21:20:37 crc kubenswrapper[4791]: I1208 21:20:37.642293 4791 scope.go:117] "RemoveContainer" containerID="c3b65d0eb862fe081bf55cf4c98afe44a02fee9d686d4997c76355084e28a73b" Dec 08 21:20:37 crc kubenswrapper[4791]: I1208 21:20:37.666848 4791 scope.go:117] "RemoveContainer" containerID="55eb5f51a54706d0f1a6bf85f1c213700d6bbd9f5191b525576e7a6e582d6b5c" Dec 08 21:20:37 crc kubenswrapper[4791]: I1208 21:20:37.672700 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4chcv"] Dec 08 21:20:37 crc kubenswrapper[4791]: I1208 21:20:37.676953 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-4chcv"] Dec 08 21:20:38 crc kubenswrapper[4791]: I1208 21:20:38.431244 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-mbbd4" Dec 08 21:20:38 crc kubenswrapper[4791]: I1208 21:20:38.431302 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-mbbd4" Dec 08 21:20:38 crc kubenswrapper[4791]: I1208 21:20:38.443994 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-npdcj" Dec 08 21:20:38 crc kubenswrapper[4791]: I1208 21:20:38.444047 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-npdcj" Dec 08 21:20:38 crc kubenswrapper[4791]: I1208 21:20:38.482505 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-npdcj" Dec 08 21:20:38 crc kubenswrapper[4791]: I1208 21:20:38.546204 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-xfw54" Dec 08 21:20:38 crc kubenswrapper[4791]: I1208 21:20:38.546262 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/community-operators-xfw54" Dec 08 21:20:38 crc kubenswrapper[4791]: I1208 21:20:38.586859 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-xfw54" Dec 08 21:20:38 crc kubenswrapper[4791]: I1208 21:20:38.729092 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-npdcj" Dec 08 21:20:38 crc kubenswrapper[4791]: I1208 21:20:38.731005 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-xfw54" Dec 08 21:20:38 crc kubenswrapper[4791]: I1208 21:20:38.731654 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-mbbd4" Dec 08 21:20:38 crc kubenswrapper[4791]: I1208 21:20:38.741926 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-dgxzp" Dec 08 21:20:38 crc kubenswrapper[4791]: I1208 21:20:38.742052 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-dgxzp" Dec 08 21:20:38 crc kubenswrapper[4791]: I1208 21:20:38.787130 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-dgxzp" Dec 08 21:20:38 crc kubenswrapper[4791]: I1208 21:20:38.787455 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-mbbd4" Dec 08 21:20:39 crc kubenswrapper[4791]: I1208 21:20:39.605477 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78dfcd05-fbfb-4b65-8d75-2fa345534b21" path="/var/lib/kubelet/pods/78dfcd05-fbfb-4b65-8d75-2fa345534b21/volumes" Dec 08 21:20:39 crc kubenswrapper[4791]: I1208 21:20:39.682296 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-dgxzp" Dec 08 21:20:40 crc kubenswrapper[4791]: I1208 21:20:40.837095 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xfw54"] Dec 08 21:20:40 crc kubenswrapper[4791]: I1208 21:20:40.838243 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-xfw54" podUID="85644602-2976-45a0-a2ae-f324c48d3ed5" containerName="registry-server" containerID="cri-o://7fa25c3595fd64436aa1be537033e591c299ef5d2fd5577494feee05d94dfe75" gracePeriod=2 Dec 08 21:20:41 crc kubenswrapper[4791]: I1208 21:20:41.401090 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-g287b" Dec 08 21:20:41 crc kubenswrapper[4791]: I1208 21:20:41.446890 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-g287b" Dec 08 21:20:41 crc kubenswrapper[4791]: I1208 21:20:41.788830 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-qvjv5" Dec 08 21:20:41 crc kubenswrapper[4791]: I1208 21:20:41.789147 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-qvjv5" Dec 08 21:20:41 crc kubenswrapper[4791]: I1208 21:20:41.827672 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-qvjv5" Dec 08 21:20:42 crc kubenswrapper[4791]: I1208 
21:20:42.426949 4791 patch_prober.go:28] interesting pod/router-default-5444994796-hs66g container/router namespace/openshift-ingress: Readiness probe status=failure output="Get \"http://localhost:1936/healthz/ready\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 08 21:20:42 crc kubenswrapper[4791]: I1208 21:20:42.427017 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-ingress/router-default-5444994796-hs66g" podUID="4a64d896-f396-4347-9e7a-091e9741b884" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 08 21:20:42 crc kubenswrapper[4791]: I1208 21:20:42.683495 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-qvjv5" Dec 08 21:20:43 crc kubenswrapper[4791]: I1208 21:20:43.036455 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dgxzp"] Dec 08 21:20:43 crc kubenswrapper[4791]: I1208 21:20:43.036791 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-dgxzp" podUID="102d168e-2cef-47ee-8911-1724e0d1982d" containerName="registry-server" containerID="cri-o://966787e2a2d7fd7db678dd7abf6c7d151ee7f743442764665fd356ca8cf19eab" gracePeriod=2 Dec 08 21:20:43 crc kubenswrapper[4791]: I1208 21:20:43.652432 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xfw54_85644602-2976-45a0-a2ae-f324c48d3ed5/registry-server/0.log" Dec 08 21:20:43 crc kubenswrapper[4791]: I1208 21:20:43.653615 4791 generic.go:334] "Generic (PLEG): container finished" podID="85644602-2976-45a0-a2ae-f324c48d3ed5" containerID="7fa25c3595fd64436aa1be537033e591c299ef5d2fd5577494feee05d94dfe75" exitCode=137 Dec 08 21:20:43 crc kubenswrapper[4791]: I1208 21:20:43.653698 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xfw54" event={"ID":"85644602-2976-45a0-a2ae-f324c48d3ed5","Type":"ContainerDied","Data":"7fa25c3595fd64436aa1be537033e591c299ef5d2fd5577494feee05d94dfe75"} Dec 08 21:20:43 crc kubenswrapper[4791]: I1208 21:20:43.655934 4791 generic.go:334] "Generic (PLEG): container finished" podID="102d168e-2cef-47ee-8911-1724e0d1982d" containerID="966787e2a2d7fd7db678dd7abf6c7d151ee7f743442764665fd356ca8cf19eab" exitCode=0 Dec 08 21:20:43 crc kubenswrapper[4791]: I1208 21:20:43.656010 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dgxzp" event={"ID":"102d168e-2cef-47ee-8911-1724e0d1982d","Type":"ContainerDied","Data":"966787e2a2d7fd7db678dd7abf6c7d151ee7f743442764665fd356ca8cf19eab"} Dec 08 21:20:45 crc kubenswrapper[4791]: I1208 21:20:45.444407 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qvjv5"] Dec 08 21:20:45 crc kubenswrapper[4791]: I1208 21:20:45.494529 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dgxzp" Dec 08 21:20:45 crc kubenswrapper[4791]: I1208 21:20:45.616611 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xfw54_85644602-2976-45a0-a2ae-f324c48d3ed5/registry-server/0.log" Dec 08 21:20:45 crc kubenswrapper[4791]: I1208 21:20:45.617576 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xfw54" Dec 08 21:20:45 crc kubenswrapper[4791]: I1208 21:20:45.663942 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n9t5z\" (UniqueName: \"kubernetes.io/projected/102d168e-2cef-47ee-8911-1724e0d1982d-kube-api-access-n9t5z\") pod \"102d168e-2cef-47ee-8911-1724e0d1982d\" (UID: \"102d168e-2cef-47ee-8911-1724e0d1982d\") " Dec 08 21:20:45 crc kubenswrapper[4791]: I1208 21:20:45.664027 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/102d168e-2cef-47ee-8911-1724e0d1982d-utilities\") pod \"102d168e-2cef-47ee-8911-1724e0d1982d\" (UID: \"102d168e-2cef-47ee-8911-1724e0d1982d\") " Dec 08 21:20:45 crc kubenswrapper[4791]: I1208 21:20:45.664074 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/102d168e-2cef-47ee-8911-1724e0d1982d-catalog-content\") pod \"102d168e-2cef-47ee-8911-1724e0d1982d\" (UID: \"102d168e-2cef-47ee-8911-1724e0d1982d\") " Dec 08 21:20:45 crc kubenswrapper[4791]: I1208 21:20:45.664391 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/85644602-2976-45a0-a2ae-f324c48d3ed5-catalog-content\") pod \"85644602-2976-45a0-a2ae-f324c48d3ed5\" (UID: \"85644602-2976-45a0-a2ae-f324c48d3ed5\") " Dec 08 21:20:45 crc kubenswrapper[4791]: I1208 21:20:45.664436 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bmjg5\" (UniqueName: \"kubernetes.io/projected/85644602-2976-45a0-a2ae-f324c48d3ed5-kube-api-access-bmjg5\") pod \"85644602-2976-45a0-a2ae-f324c48d3ed5\" (UID: \"85644602-2976-45a0-a2ae-f324c48d3ed5\") " Dec 08 21:20:45 crc kubenswrapper[4791]: I1208 21:20:45.664932 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/102d168e-2cef-47ee-8911-1724e0d1982d-utilities" (OuterVolumeSpecName: "utilities") pod "102d168e-2cef-47ee-8911-1724e0d1982d" (UID: "102d168e-2cef-47ee-8911-1724e0d1982d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:20:45 crc kubenswrapper[4791]: I1208 21:20:45.671964 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/102d168e-2cef-47ee-8911-1724e0d1982d-kube-api-access-n9t5z" (OuterVolumeSpecName: "kube-api-access-n9t5z") pod "102d168e-2cef-47ee-8911-1724e0d1982d" (UID: "102d168e-2cef-47ee-8911-1724e0d1982d"). InnerVolumeSpecName "kube-api-access-n9t5z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:20:45 crc kubenswrapper[4791]: I1208 21:20:45.681794 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85644602-2976-45a0-a2ae-f324c48d3ed5-kube-api-access-bmjg5" (OuterVolumeSpecName: "kube-api-access-bmjg5") pod "85644602-2976-45a0-a2ae-f324c48d3ed5" (UID: "85644602-2976-45a0-a2ae-f324c48d3ed5"). InnerVolumeSpecName "kube-api-access-bmjg5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:20:45 crc kubenswrapper[4791]: I1208 21:20:45.683575 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xfw54_85644602-2976-45a0-a2ae-f324c48d3ed5/registry-server/0.log" Dec 08 21:20:45 crc kubenswrapper[4791]: I1208 21:20:45.689420 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xfw54" event={"ID":"85644602-2976-45a0-a2ae-f324c48d3ed5","Type":"ContainerDied","Data":"8d1d1c61ec27b252a13030550c7aa221a02039fdbfd7b7db6ab19ac52d364d4b"} Dec 08 21:20:45 crc kubenswrapper[4791]: I1208 21:20:45.689472 4791 scope.go:117] "RemoveContainer" containerID="7fa25c3595fd64436aa1be537033e591c299ef5d2fd5577494feee05d94dfe75" Dec 08 21:20:45 crc kubenswrapper[4791]: I1208 21:20:45.689610 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xfw54" Dec 08 21:20:45 crc kubenswrapper[4791]: I1208 21:20:45.705290 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-qvjv5" podUID="acc683d2-9743-4518-a3b3-63d42dbaf522" containerName="registry-server" containerID="cri-o://c4e251d001f0bc32b28c744015beecda19e73bd42aefcda51cc2a383c764094a" gracePeriod=2 Dec 08 21:20:45 crc kubenswrapper[4791]: I1208 21:20:45.705469 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dgxzp" Dec 08 21:20:45 crc kubenswrapper[4791]: I1208 21:20:45.705817 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dgxzp" event={"ID":"102d168e-2cef-47ee-8911-1724e0d1982d","Type":"ContainerDied","Data":"e9372d8229ccc95491b6aa18c5efa7f5217e6cee73ae00ac4505fdf5117c61a6"} Dec 08 21:20:45 crc kubenswrapper[4791]: I1208 21:20:45.736037 4791 scope.go:117] "RemoveContainer" containerID="781e33c1f6625c5382aec157fba46b9b56849a5aab66b4b4669a9a119dc7d8d7" Dec 08 21:20:45 crc kubenswrapper[4791]: I1208 21:20:45.752362 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/102d168e-2cef-47ee-8911-1724e0d1982d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "102d168e-2cef-47ee-8911-1724e0d1982d" (UID: "102d168e-2cef-47ee-8911-1724e0d1982d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:20:45 crc kubenswrapper[4791]: I1208 21:20:45.753503 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/85644602-2976-45a0-a2ae-f324c48d3ed5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "85644602-2976-45a0-a2ae-f324c48d3ed5" (UID: "85644602-2976-45a0-a2ae-f324c48d3ed5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:20:45 crc kubenswrapper[4791]: I1208 21:20:45.761951 4791 scope.go:117] "RemoveContainer" containerID="724add11e0d6cb434e1283c59e4f037adcf8d45c7ffc5fbd3e7b1b635198b396" Dec 08 21:20:45 crc kubenswrapper[4791]: I1208 21:20:45.768183 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/85644602-2976-45a0-a2ae-f324c48d3ed5-utilities\") pod \"85644602-2976-45a0-a2ae-f324c48d3ed5\" (UID: \"85644602-2976-45a0-a2ae-f324c48d3ed5\") " Dec 08 21:20:45 crc kubenswrapper[4791]: I1208 21:20:45.768543 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/102d168e-2cef-47ee-8911-1724e0d1982d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 21:20:45 crc kubenswrapper[4791]: I1208 21:20:45.768556 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/85644602-2976-45a0-a2ae-f324c48d3ed5-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 21:20:45 crc kubenswrapper[4791]: I1208 21:20:45.768569 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bmjg5\" (UniqueName: \"kubernetes.io/projected/85644602-2976-45a0-a2ae-f324c48d3ed5-kube-api-access-bmjg5\") on node \"crc\" DevicePath \"\"" Dec 08 21:20:45 crc kubenswrapper[4791]: I1208 21:20:45.768581 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n9t5z\" (UniqueName: \"kubernetes.io/projected/102d168e-2cef-47ee-8911-1724e0d1982d-kube-api-access-n9t5z\") on node \"crc\" DevicePath \"\"" Dec 08 21:20:45 crc kubenswrapper[4791]: I1208 21:20:45.768590 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/102d168e-2cef-47ee-8911-1724e0d1982d-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 21:20:45 crc kubenswrapper[4791]: I1208 21:20:45.770557 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/85644602-2976-45a0-a2ae-f324c48d3ed5-utilities" (OuterVolumeSpecName: "utilities") pod "85644602-2976-45a0-a2ae-f324c48d3ed5" (UID: "85644602-2976-45a0-a2ae-f324c48d3ed5"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:20:45 crc kubenswrapper[4791]: I1208 21:20:45.779803 4791 scope.go:117] "RemoveContainer" containerID="966787e2a2d7fd7db678dd7abf6c7d151ee7f743442764665fd356ca8cf19eab" Dec 08 21:20:45 crc kubenswrapper[4791]: I1208 21:20:45.798169 4791 scope.go:117] "RemoveContainer" containerID="2ec416b26ab9c9d86226ffe30e8af6b334ea7e1eeb5599f9dcbdcf89d208c27a" Dec 08 21:20:45 crc kubenswrapper[4791]: I1208 21:20:45.813114 4791 scope.go:117] "RemoveContainer" containerID="541db9b23bbe04f31fc4bb9d6f32d1b9124bd8f426907f81cbfce29884cbb6e5" Dec 08 21:20:45 crc kubenswrapper[4791]: I1208 21:20:45.870273 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/85644602-2976-45a0-a2ae-f324c48d3ed5-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 21:20:46 crc kubenswrapper[4791]: I1208 21:20:46.016598 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xfw54"] Dec 08 21:20:46 crc kubenswrapper[4791]: I1208 21:20:46.026099 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-xfw54"] Dec 08 21:20:46 crc kubenswrapper[4791]: I1208 21:20:46.042356 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dgxzp"] Dec 08 21:20:46 crc kubenswrapper[4791]: I1208 21:20:46.047432 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-dgxzp"] Dec 08 21:20:47 crc kubenswrapper[4791]: I1208 21:20:47.608292 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="102d168e-2cef-47ee-8911-1724e0d1982d" path="/var/lib/kubelet/pods/102d168e-2cef-47ee-8911-1724e0d1982d/volumes" Dec 08 21:20:47 crc kubenswrapper[4791]: I1208 21:20:47.609289 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85644602-2976-45a0-a2ae-f324c48d3ed5" path="/var/lib/kubelet/pods/85644602-2976-45a0-a2ae-f324c48d3ed5/volumes" Dec 08 21:20:47 crc kubenswrapper[4791]: I1208 21:20:47.722506 4791 generic.go:334] "Generic (PLEG): container finished" podID="acc683d2-9743-4518-a3b3-63d42dbaf522" containerID="c4e251d001f0bc32b28c744015beecda19e73bd42aefcda51cc2a383c764094a" exitCode=0 Dec 08 21:20:47 crc kubenswrapper[4791]: I1208 21:20:47.722896 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qvjv5" event={"ID":"acc683d2-9743-4518-a3b3-63d42dbaf522","Type":"ContainerDied","Data":"c4e251d001f0bc32b28c744015beecda19e73bd42aefcda51cc2a383c764094a"} Dec 08 21:20:48 crc kubenswrapper[4791]: I1208 21:20:48.305112 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-qvjv5" Dec 08 21:20:48 crc kubenswrapper[4791]: I1208 21:20:48.309106 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-zll8d"] Dec 08 21:20:48 crc kubenswrapper[4791]: I1208 21:20:48.407705 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/acc683d2-9743-4518-a3b3-63d42dbaf522-catalog-content\") pod \"acc683d2-9743-4518-a3b3-63d42dbaf522\" (UID: \"acc683d2-9743-4518-a3b3-63d42dbaf522\") " Dec 08 21:20:48 crc kubenswrapper[4791]: I1208 21:20:48.408099 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/acc683d2-9743-4518-a3b3-63d42dbaf522-utilities\") pod \"acc683d2-9743-4518-a3b3-63d42dbaf522\" (UID: \"acc683d2-9743-4518-a3b3-63d42dbaf522\") " Dec 08 21:20:48 crc kubenswrapper[4791]: I1208 21:20:48.408137 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v94g2\" (UniqueName: \"kubernetes.io/projected/acc683d2-9743-4518-a3b3-63d42dbaf522-kube-api-access-v94g2\") pod \"acc683d2-9743-4518-a3b3-63d42dbaf522\" (UID: \"acc683d2-9743-4518-a3b3-63d42dbaf522\") " Dec 08 21:20:48 crc kubenswrapper[4791]: I1208 21:20:48.409538 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/acc683d2-9743-4518-a3b3-63d42dbaf522-utilities" (OuterVolumeSpecName: "utilities") pod "acc683d2-9743-4518-a3b3-63d42dbaf522" (UID: "acc683d2-9743-4518-a3b3-63d42dbaf522"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:20:48 crc kubenswrapper[4791]: I1208 21:20:48.429094 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/acc683d2-9743-4518-a3b3-63d42dbaf522-kube-api-access-v94g2" (OuterVolumeSpecName: "kube-api-access-v94g2") pod "acc683d2-9743-4518-a3b3-63d42dbaf522" (UID: "acc683d2-9743-4518-a3b3-63d42dbaf522"). InnerVolumeSpecName "kube-api-access-v94g2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:20:48 crc kubenswrapper[4791]: I1208 21:20:48.512169 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/acc683d2-9743-4518-a3b3-63d42dbaf522-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 21:20:48 crc kubenswrapper[4791]: I1208 21:20:48.512222 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v94g2\" (UniqueName: \"kubernetes.io/projected/acc683d2-9743-4518-a3b3-63d42dbaf522-kube-api-access-v94g2\") on node \"crc\" DevicePath \"\"" Dec 08 21:20:48 crc kubenswrapper[4791]: I1208 21:20:48.531407 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/acc683d2-9743-4518-a3b3-63d42dbaf522-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "acc683d2-9743-4518-a3b3-63d42dbaf522" (UID: "acc683d2-9743-4518-a3b3-63d42dbaf522"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:20:48 crc kubenswrapper[4791]: I1208 21:20:48.613857 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/acc683d2-9743-4518-a3b3-63d42dbaf522-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 21:20:48 crc kubenswrapper[4791]: I1208 21:20:48.731578 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qvjv5" event={"ID":"acc683d2-9743-4518-a3b3-63d42dbaf522","Type":"ContainerDied","Data":"23e41b167e9610592bcdefd905a51c523fecacb092e52b2d20afb9065176e52e"} Dec 08 21:20:48 crc kubenswrapper[4791]: I1208 21:20:48.731639 4791 scope.go:117] "RemoveContainer" containerID="c4e251d001f0bc32b28c744015beecda19e73bd42aefcda51cc2a383c764094a" Dec 08 21:20:48 crc kubenswrapper[4791]: I1208 21:20:48.731789 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qvjv5" Dec 08 21:20:48 crc kubenswrapper[4791]: I1208 21:20:48.749840 4791 scope.go:117] "RemoveContainer" containerID="07ddb93c39cde39faed9ba9b49276d0508a9d8711a0e0dfdef1474937fa7de83" Dec 08 21:20:48 crc kubenswrapper[4791]: I1208 21:20:48.767694 4791 scope.go:117] "RemoveContainer" containerID="6d90f76047d3869e5ba99ca00e06283d467a4c81b7359f81e9d3a48a43439e7f" Dec 08 21:20:48 crc kubenswrapper[4791]: I1208 21:20:48.774077 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qvjv5"] Dec 08 21:20:48 crc kubenswrapper[4791]: I1208 21:20:48.781000 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-qvjv5"] Dec 08 21:20:49 crc kubenswrapper[4791]: I1208 21:20:49.612882 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="acc683d2-9743-4518-a3b3-63d42dbaf522" path="/var/lib/kubelet/pods/acc683d2-9743-4518-a3b3-63d42dbaf522/volumes" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.644093 4791 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 08 21:20:53 crc kubenswrapper[4791]: E1208 21:20:53.645222 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85644602-2976-45a0-a2ae-f324c48d3ed5" containerName="extract-content" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.645240 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="85644602-2976-45a0-a2ae-f324c48d3ed5" containerName="extract-content" Dec 08 21:20:53 crc kubenswrapper[4791]: E1208 21:20:53.645250 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="acc683d2-9743-4518-a3b3-63d42dbaf522" containerName="extract-content" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.645257 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="acc683d2-9743-4518-a3b3-63d42dbaf522" containerName="extract-content" Dec 08 21:20:53 crc kubenswrapper[4791]: E1208 21:20:53.645271 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="102d168e-2cef-47ee-8911-1724e0d1982d" containerName="registry-server" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.645277 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="102d168e-2cef-47ee-8911-1724e0d1982d" containerName="registry-server" Dec 08 21:20:53 crc kubenswrapper[4791]: E1208 21:20:53.645286 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78dfcd05-fbfb-4b65-8d75-2fa345534b21" containerName="registry-server" Dec 08 
21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.645293 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="78dfcd05-fbfb-4b65-8d75-2fa345534b21" containerName="registry-server" Dec 08 21:20:53 crc kubenswrapper[4791]: E1208 21:20:53.645301 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0209b505-0086-44ec-b1e2-bc0bad570988" containerName="pruner" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.645307 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="0209b505-0086-44ec-b1e2-bc0bad570988" containerName="pruner" Dec 08 21:20:53 crc kubenswrapper[4791]: E1208 21:20:53.645315 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="848a44d0-a0f4-48c2-9e4a-f0a4d3329815" containerName="kube-multus-additional-cni-plugins" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.645321 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="848a44d0-a0f4-48c2-9e4a-f0a4d3329815" containerName="kube-multus-additional-cni-plugins" Dec 08 21:20:53 crc kubenswrapper[4791]: E1208 21:20:53.645331 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78dfcd05-fbfb-4b65-8d75-2fa345534b21" containerName="extract-content" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.645336 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="78dfcd05-fbfb-4b65-8d75-2fa345534b21" containerName="extract-content" Dec 08 21:20:53 crc kubenswrapper[4791]: E1208 21:20:53.645344 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="102d168e-2cef-47ee-8911-1724e0d1982d" containerName="extract-content" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.645350 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="102d168e-2cef-47ee-8911-1724e0d1982d" containerName="extract-content" Dec 08 21:20:53 crc kubenswrapper[4791]: E1208 21:20:53.645359 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="acc683d2-9743-4518-a3b3-63d42dbaf522" containerName="registry-server" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.645367 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="acc683d2-9743-4518-a3b3-63d42dbaf522" containerName="registry-server" Dec 08 21:20:53 crc kubenswrapper[4791]: E1208 21:20:53.645376 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85644602-2976-45a0-a2ae-f324c48d3ed5" containerName="extract-utilities" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.645382 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="85644602-2976-45a0-a2ae-f324c48d3ed5" containerName="extract-utilities" Dec 08 21:20:53 crc kubenswrapper[4791]: E1208 21:20:53.645391 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="acc683d2-9743-4518-a3b3-63d42dbaf522" containerName="extract-utilities" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.645397 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="acc683d2-9743-4518-a3b3-63d42dbaf522" containerName="extract-utilities" Dec 08 21:20:53 crc kubenswrapper[4791]: E1208 21:20:53.645406 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85644602-2976-45a0-a2ae-f324c48d3ed5" containerName="registry-server" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.645413 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="85644602-2976-45a0-a2ae-f324c48d3ed5" containerName="registry-server" Dec 08 21:20:53 crc kubenswrapper[4791]: E1208 21:20:53.645425 4791 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="78dfcd05-fbfb-4b65-8d75-2fa345534b21" containerName="extract-utilities" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.645431 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="78dfcd05-fbfb-4b65-8d75-2fa345534b21" containerName="extract-utilities" Dec 08 21:20:53 crc kubenswrapper[4791]: E1208 21:20:53.645446 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="102d168e-2cef-47ee-8911-1724e0d1982d" containerName="extract-utilities" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.645453 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="102d168e-2cef-47ee-8911-1724e0d1982d" containerName="extract-utilities" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.645979 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="85644602-2976-45a0-a2ae-f324c48d3ed5" containerName="registry-server" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.646047 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="acc683d2-9743-4518-a3b3-63d42dbaf522" containerName="registry-server" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.646066 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="848a44d0-a0f4-48c2-9e4a-f0a4d3329815" containerName="kube-multus-additional-cni-plugins" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.646078 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="78dfcd05-fbfb-4b65-8d75-2fa345534b21" containerName="registry-server" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.646088 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="0209b505-0086-44ec-b1e2-bc0bad570988" containerName="pruner" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.646108 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="102d168e-2cef-47ee-8911-1724e0d1982d" containerName="registry-server" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.646644 4791 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.646682 4791 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.646963 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 21:20:53 crc kubenswrapper[4791]: E1208 21:20:53.647224 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.647268 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 08 21:20:53 crc kubenswrapper[4791]: E1208 21:20:53.647282 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.647319 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 08 21:20:53 crc kubenswrapper[4791]: E1208 21:20:53.647332 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.647340 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 08 21:20:53 crc kubenswrapper[4791]: E1208 21:20:53.647354 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.647362 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 08 21:20:53 crc kubenswrapper[4791]: E1208 21:20:53.647375 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.647382 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 08 21:20:53 crc kubenswrapper[4791]: E1208 21:20:53.647389 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.647396 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.647538 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.647554 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.647565 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.647574 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.647583 4791 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.647758 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://4787a6a226550bb0d96b1b8af8059a6d349d659bad8718ef1df246a66b1e0ac2" gracePeriod=15 Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.647819 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://20ffa577a6292eb443cd5673c9d3a3833627cd38e63b153db5627047dc57b8f1" gracePeriod=15 Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.647816 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://c153324bc4f35dd06b193de5ef89462ab658a50a1384b9eb6f5b3b5ccf0b2018" gracePeriod=15 Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.647818 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://20b454a2e59b4533af556e4f5175b49175c7b4656861477cec02573e09f27b28" gracePeriod=15 Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.647882 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://ad31c2445a3c19bedfc0d319262fdcc11fe2ba317881450f100cd4e67e2355f2" gracePeriod=15 Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.650954 4791 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="f4b27818a5e8e43d0dc095d08835c792" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.700339 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.700418 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.700474 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.700507 4791 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.700561 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.700608 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.700650 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.700678 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.804166 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.804216 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.804247 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.804280 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 
21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.804305 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.804330 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.804351 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.804380 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.804448 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.804494 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.804487 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.804520 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.804542 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.804568 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.804568 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 21:20:53 crc kubenswrapper[4791]: I1208 21:20:53.804590 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:20:54 crc kubenswrapper[4791]: E1208 21:20:54.708664 4791 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.173:6443: connect: connection refused" Dec 08 21:20:54 crc kubenswrapper[4791]: E1208 21:20:54.709338 4791 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.173:6443: connect: connection refused" Dec 08 21:20:54 crc kubenswrapper[4791]: E1208 21:20:54.709669 4791 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.173:6443: connect: connection refused" Dec 08 21:20:54 crc kubenswrapper[4791]: E1208 21:20:54.710108 4791 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.173:6443: connect: connection refused" Dec 08 21:20:54 crc kubenswrapper[4791]: E1208 21:20:54.710604 4791 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.173:6443: connect: connection refused" Dec 08 21:20:54 crc kubenswrapper[4791]: I1208 21:20:54.710654 4791 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Dec 08 21:20:54 crc kubenswrapper[4791]: E1208 21:20:54.710961 4791 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.173:6443: connect: connection refused" interval="200ms" Dec 08 21:20:54 crc kubenswrapper[4791]: I1208 21:20:54.782085 4791 generic.go:334] "Generic (PLEG): container finished" podID="31175fa6-cfc1-4032-af15-d8f96bfc29c9" containerID="64499fd416c545ba3c245cb24f79477e9cd51db7b964a60fddd928d7b09377bc" exitCode=0 Dec 08 21:20:54 crc kubenswrapper[4791]: I1208 21:20:54.782152 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" 
event={"ID":"31175fa6-cfc1-4032-af15-d8f96bfc29c9","Type":"ContainerDied","Data":"64499fd416c545ba3c245cb24f79477e9cd51db7b964a60fddd928d7b09377bc"} Dec 08 21:20:54 crc kubenswrapper[4791]: I1208 21:20:54.783338 4791 status_manager.go:851] "Failed to get status for pod" podUID="31175fa6-cfc1-4032-af15-d8f96bfc29c9" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.173:6443: connect: connection refused" Dec 08 21:20:54 crc kubenswrapper[4791]: I1208 21:20:54.786480 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 08 21:20:54 crc kubenswrapper[4791]: I1208 21:20:54.787332 4791 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="ad31c2445a3c19bedfc0d319262fdcc11fe2ba317881450f100cd4e67e2355f2" exitCode=0 Dec 08 21:20:54 crc kubenswrapper[4791]: I1208 21:20:54.787380 4791 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="c153324bc4f35dd06b193de5ef89462ab658a50a1384b9eb6f5b3b5ccf0b2018" exitCode=0 Dec 08 21:20:54 crc kubenswrapper[4791]: I1208 21:20:54.787399 4791 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="20b454a2e59b4533af556e4f5175b49175c7b4656861477cec02573e09f27b28" exitCode=0 Dec 08 21:20:54 crc kubenswrapper[4791]: I1208 21:20:54.787417 4791 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="20ffa577a6292eb443cd5673c9d3a3833627cd38e63b153db5627047dc57b8f1" exitCode=2 Dec 08 21:20:54 crc kubenswrapper[4791]: E1208 21:20:54.912319 4791 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.173:6443: connect: connection refused" interval="400ms" Dec 08 21:20:55 crc kubenswrapper[4791]: E1208 21:20:55.313566 4791 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.173:6443: connect: connection refused" interval="800ms" Dec 08 21:20:55 crc kubenswrapper[4791]: E1208 21:20:55.630828 4791 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 38.102.83.173:6443: connect: connection refused" pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" volumeName="registry-storage" Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.080353 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.081791 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.082750 4791 status_manager.go:851] "Failed to get status for pod" podUID="31175fa6-cfc1-4032-af15-d8f96bfc29c9" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.173:6443: connect: connection refused" Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.083235 4791 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.173:6443: connect: connection refused" Dec 08 21:20:56 crc kubenswrapper[4791]: E1208 21:20:56.115044 4791 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.173:6443: connect: connection refused" interval="1.6s" Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.123280 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.124068 4791 status_manager.go:851] "Failed to get status for pod" podUID="31175fa6-cfc1-4032-af15-d8f96bfc29c9" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.173:6443: connect: connection refused" Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.124529 4791 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.173:6443: connect: connection refused" Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.239740 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/31175fa6-cfc1-4032-af15-d8f96bfc29c9-kubelet-dir\") pod \"31175fa6-cfc1-4032-af15-d8f96bfc29c9\" (UID: \"31175fa6-cfc1-4032-af15-d8f96bfc29c9\") " Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.239822 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.239877 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/31175fa6-cfc1-4032-af15-d8f96bfc29c9-var-lock\") pod \"31175fa6-cfc1-4032-af15-d8f96bfc29c9\" (UID: \"31175fa6-cfc1-4032-af15-d8f96bfc29c9\") " Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.239902 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 
21:20:56.239965 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/31175fa6-cfc1-4032-af15-d8f96bfc29c9-kube-api-access\") pod \"31175fa6-cfc1-4032-af15-d8f96bfc29c9\" (UID: \"31175fa6-cfc1-4032-af15-d8f96bfc29c9\") " Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.239894 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/31175fa6-cfc1-4032-af15-d8f96bfc29c9-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "31175fa6-cfc1-4032-af15-d8f96bfc29c9" (UID: "31175fa6-cfc1-4032-af15-d8f96bfc29c9"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.240011 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.240006 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.239992 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.239919 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/31175fa6-cfc1-4032-af15-d8f96bfc29c9-var-lock" (OuterVolumeSpecName: "var-lock") pod "31175fa6-cfc1-4032-af15-d8f96bfc29c9" (UID: "31175fa6-cfc1-4032-af15-d8f96bfc29c9"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.239964 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.240326 4791 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.240341 4791 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/31175fa6-cfc1-4032-af15-d8f96bfc29c9-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.240350 4791 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.240358 4791 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/31175fa6-cfc1-4032-af15-d8f96bfc29c9-var-lock\") on node \"crc\" DevicePath \"\"" Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.240367 4791 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.247507 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31175fa6-cfc1-4032-af15-d8f96bfc29c9-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "31175fa6-cfc1-4032-af15-d8f96bfc29c9" (UID: "31175fa6-cfc1-4032-af15-d8f96bfc29c9"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.341645 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/31175fa6-cfc1-4032-af15-d8f96bfc29c9-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.837734 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.837766 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"31175fa6-cfc1-4032-af15-d8f96bfc29c9","Type":"ContainerDied","Data":"fe1f29b09f204cb6eb64a0f9eab6f3f14640bc6b68f454835e031c160ab29cf2"} Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.838204 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fe1f29b09f204cb6eb64a0f9eab6f3f14640bc6b68f454835e031c160ab29cf2" Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.841222 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.842036 4791 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="4787a6a226550bb0d96b1b8af8059a6d349d659bad8718ef1df246a66b1e0ac2" exitCode=0 Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.842242 4791 scope.go:117] "RemoveContainer" containerID="ad31c2445a3c19bedfc0d319262fdcc11fe2ba317881450f100cd4e67e2355f2" Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.842266 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.855187 4791 status_manager.go:851] "Failed to get status for pod" podUID="31175fa6-cfc1-4032-af15-d8f96bfc29c9" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.173:6443: connect: connection refused" Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.856127 4791 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.173:6443: connect: connection refused" Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.864781 4791 scope.go:117] "RemoveContainer" containerID="c153324bc4f35dd06b193de5ef89462ab658a50a1384b9eb6f5b3b5ccf0b2018" Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.867169 4791 status_manager.go:851] "Failed to get status for pod" podUID="31175fa6-cfc1-4032-af15-d8f96bfc29c9" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.173:6443: connect: connection refused" Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.867536 4791 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.173:6443: connect: connection refused" Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.885813 4791 scope.go:117] "RemoveContainer" containerID="20b454a2e59b4533af556e4f5175b49175c7b4656861477cec02573e09f27b28" Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.900679 4791 scope.go:117] "RemoveContainer" containerID="20ffa577a6292eb443cd5673c9d3a3833627cd38e63b153db5627047dc57b8f1" Dec 08 
21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.918530 4791 scope.go:117] "RemoveContainer" containerID="4787a6a226550bb0d96b1b8af8059a6d349d659bad8718ef1df246a66b1e0ac2" Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.933272 4791 scope.go:117] "RemoveContainer" containerID="43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18" Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.953446 4791 scope.go:117] "RemoveContainer" containerID="ad31c2445a3c19bedfc0d319262fdcc11fe2ba317881450f100cd4e67e2355f2" Dec 08 21:20:56 crc kubenswrapper[4791]: E1208 21:20:56.954112 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ad31c2445a3c19bedfc0d319262fdcc11fe2ba317881450f100cd4e67e2355f2\": container with ID starting with ad31c2445a3c19bedfc0d319262fdcc11fe2ba317881450f100cd4e67e2355f2 not found: ID does not exist" containerID="ad31c2445a3c19bedfc0d319262fdcc11fe2ba317881450f100cd4e67e2355f2" Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.954165 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad31c2445a3c19bedfc0d319262fdcc11fe2ba317881450f100cd4e67e2355f2"} err="failed to get container status \"ad31c2445a3c19bedfc0d319262fdcc11fe2ba317881450f100cd4e67e2355f2\": rpc error: code = NotFound desc = could not find container \"ad31c2445a3c19bedfc0d319262fdcc11fe2ba317881450f100cd4e67e2355f2\": container with ID starting with ad31c2445a3c19bedfc0d319262fdcc11fe2ba317881450f100cd4e67e2355f2 not found: ID does not exist" Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.954238 4791 scope.go:117] "RemoveContainer" containerID="c153324bc4f35dd06b193de5ef89462ab658a50a1384b9eb6f5b3b5ccf0b2018" Dec 08 21:20:56 crc kubenswrapper[4791]: E1208 21:20:56.954638 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c153324bc4f35dd06b193de5ef89462ab658a50a1384b9eb6f5b3b5ccf0b2018\": container with ID starting with c153324bc4f35dd06b193de5ef89462ab658a50a1384b9eb6f5b3b5ccf0b2018 not found: ID does not exist" containerID="c153324bc4f35dd06b193de5ef89462ab658a50a1384b9eb6f5b3b5ccf0b2018" Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.954668 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c153324bc4f35dd06b193de5ef89462ab658a50a1384b9eb6f5b3b5ccf0b2018"} err="failed to get container status \"c153324bc4f35dd06b193de5ef89462ab658a50a1384b9eb6f5b3b5ccf0b2018\": rpc error: code = NotFound desc = could not find container \"c153324bc4f35dd06b193de5ef89462ab658a50a1384b9eb6f5b3b5ccf0b2018\": container with ID starting with c153324bc4f35dd06b193de5ef89462ab658a50a1384b9eb6f5b3b5ccf0b2018 not found: ID does not exist" Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.954687 4791 scope.go:117] "RemoveContainer" containerID="20b454a2e59b4533af556e4f5175b49175c7b4656861477cec02573e09f27b28" Dec 08 21:20:56 crc kubenswrapper[4791]: E1208 21:20:56.955019 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"20b454a2e59b4533af556e4f5175b49175c7b4656861477cec02573e09f27b28\": container with ID starting with 20b454a2e59b4533af556e4f5175b49175c7b4656861477cec02573e09f27b28 not found: ID does not exist" containerID="20b454a2e59b4533af556e4f5175b49175c7b4656861477cec02573e09f27b28" Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.955259 4791 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"20b454a2e59b4533af556e4f5175b49175c7b4656861477cec02573e09f27b28"} err="failed to get container status \"20b454a2e59b4533af556e4f5175b49175c7b4656861477cec02573e09f27b28\": rpc error: code = NotFound desc = could not find container \"20b454a2e59b4533af556e4f5175b49175c7b4656861477cec02573e09f27b28\": container with ID starting with 20b454a2e59b4533af556e4f5175b49175c7b4656861477cec02573e09f27b28 not found: ID does not exist" Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.955289 4791 scope.go:117] "RemoveContainer" containerID="20ffa577a6292eb443cd5673c9d3a3833627cd38e63b153db5627047dc57b8f1" Dec 08 21:20:56 crc kubenswrapper[4791]: E1208 21:20:56.955588 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"20ffa577a6292eb443cd5673c9d3a3833627cd38e63b153db5627047dc57b8f1\": container with ID starting with 20ffa577a6292eb443cd5673c9d3a3833627cd38e63b153db5627047dc57b8f1 not found: ID does not exist" containerID="20ffa577a6292eb443cd5673c9d3a3833627cd38e63b153db5627047dc57b8f1" Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.955675 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"20ffa577a6292eb443cd5673c9d3a3833627cd38e63b153db5627047dc57b8f1"} err="failed to get container status \"20ffa577a6292eb443cd5673c9d3a3833627cd38e63b153db5627047dc57b8f1\": rpc error: code = NotFound desc = could not find container \"20ffa577a6292eb443cd5673c9d3a3833627cd38e63b153db5627047dc57b8f1\": container with ID starting with 20ffa577a6292eb443cd5673c9d3a3833627cd38e63b153db5627047dc57b8f1 not found: ID does not exist" Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.955813 4791 scope.go:117] "RemoveContainer" containerID="4787a6a226550bb0d96b1b8af8059a6d349d659bad8718ef1df246a66b1e0ac2" Dec 08 21:20:56 crc kubenswrapper[4791]: E1208 21:20:56.957531 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4787a6a226550bb0d96b1b8af8059a6d349d659bad8718ef1df246a66b1e0ac2\": container with ID starting with 4787a6a226550bb0d96b1b8af8059a6d349d659bad8718ef1df246a66b1e0ac2 not found: ID does not exist" containerID="4787a6a226550bb0d96b1b8af8059a6d349d659bad8718ef1df246a66b1e0ac2" Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.957622 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4787a6a226550bb0d96b1b8af8059a6d349d659bad8718ef1df246a66b1e0ac2"} err="failed to get container status \"4787a6a226550bb0d96b1b8af8059a6d349d659bad8718ef1df246a66b1e0ac2\": rpc error: code = NotFound desc = could not find container \"4787a6a226550bb0d96b1b8af8059a6d349d659bad8718ef1df246a66b1e0ac2\": container with ID starting with 4787a6a226550bb0d96b1b8af8059a6d349d659bad8718ef1df246a66b1e0ac2 not found: ID does not exist" Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.957692 4791 scope.go:117] "RemoveContainer" containerID="43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18" Dec 08 21:20:56 crc kubenswrapper[4791]: E1208 21:20:56.957957 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18\": container with ID starting with 43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18 not found: ID does 
not exist" containerID="43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18" Dec 08 21:20:56 crc kubenswrapper[4791]: I1208 21:20:56.958076 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18"} err="failed to get container status \"43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18\": rpc error: code = NotFound desc = could not find container \"43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18\": container with ID starting with 43caf1d96588f00bed09f4c04e0bfbfc5769848aeed2daf0fbc69ecc9834cb18 not found: ID does not exist" Dec 08 21:20:57 crc kubenswrapper[4791]: I1208 21:20:57.604620 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 08 21:20:57 crc kubenswrapper[4791]: E1208 21:20:57.716201 4791 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.173:6443: connect: connection refused" interval="3.2s" Dec 08 21:20:58 crc kubenswrapper[4791]: E1208 21:20:58.678790 4791 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.173:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 21:20:58 crc kubenswrapper[4791]: I1208 21:20:58.679483 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 21:20:58 crc kubenswrapper[4791]: E1208 21:20:58.709665 4791 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.173:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187f5a4b5e835c44 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-08 21:20:58.707237956 +0000 UTC m=+135.405996301,LastTimestamp:2025-12-08 21:20:58.707237956 +0000 UTC m=+135.405996301,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 08 21:20:58 crc kubenswrapper[4791]: I1208 21:20:58.856987 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"aa97a54c72ea9771cfdf3fed3fe3e3e0524ec087131021b6b2accae44f31da09"} Dec 08 21:20:59 crc kubenswrapper[4791]: I1208 21:20:59.863374 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" 
event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"5ab67524ee5c02e932a26ac6ae7a644db172cae53f270f44137fbfc80dbd95c3"} Dec 08 21:20:59 crc kubenswrapper[4791]: I1208 21:20:59.864045 4791 status_manager.go:851] "Failed to get status for pod" podUID="31175fa6-cfc1-4032-af15-d8f96bfc29c9" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.173:6443: connect: connection refused" Dec 08 21:20:59 crc kubenswrapper[4791]: E1208 21:20:59.864061 4791 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.173:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 21:21:00 crc kubenswrapper[4791]: E1208 21:21:00.871145 4791 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.173:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 21:21:00 crc kubenswrapper[4791]: E1208 21:21:00.917645 4791 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.173:6443: connect: connection refused" interval="6.4s" Dec 08 21:21:03 crc kubenswrapper[4791]: I1208 21:21:03.599698 4791 status_manager.go:851] "Failed to get status for pod" podUID="31175fa6-cfc1-4032-af15-d8f96bfc29c9" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.173:6443: connect: connection refused" Dec 08 21:21:05 crc kubenswrapper[4791]: I1208 21:21:05.252436 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:21:05 crc kubenswrapper[4791]: I1208 21:21:05.252827 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:21:06 crc kubenswrapper[4791]: E1208 21:21:06.216299 4791 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.173:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187f5a4b5e835c44 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on 
machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-08 21:20:58.707237956 +0000 UTC m=+135.405996301,LastTimestamp:2025-12-08 21:20:58.707237956 +0000 UTC m=+135.405996301,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 08 21:21:06 crc kubenswrapper[4791]: I1208 21:21:06.596970 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:21:06 crc kubenswrapper[4791]: I1208 21:21:06.597932 4791 status_manager.go:851] "Failed to get status for pod" podUID="31175fa6-cfc1-4032-af15-d8f96bfc29c9" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.173:6443: connect: connection refused" Dec 08 21:21:06 crc kubenswrapper[4791]: I1208 21:21:06.613050 4791 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="c1d7d5b0-bf26-4221-9933-1f2af688750f" Dec 08 21:21:06 crc kubenswrapper[4791]: I1208 21:21:06.613281 4791 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="c1d7d5b0-bf26-4221-9933-1f2af688750f" Dec 08 21:21:06 crc kubenswrapper[4791]: E1208 21:21:06.613809 4791 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.173:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:21:06 crc kubenswrapper[4791]: I1208 21:21:06.614305 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:21:06 crc kubenswrapper[4791]: I1208 21:21:06.899645 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"d12160322fa82a6ce280aaaf2d763dd7cdf5bc579fec8218b5c3342feed6886d"} Dec 08 21:21:06 crc kubenswrapper[4791]: I1208 21:21:06.899691 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"b9f95d7618662a075575415d1639fcb3dc7ae4d4cec5e86d97009528cced4875"} Dec 08 21:21:06 crc kubenswrapper[4791]: I1208 21:21:06.900053 4791 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="c1d7d5b0-bf26-4221-9933-1f2af688750f" Dec 08 21:21:06 crc kubenswrapper[4791]: I1208 21:21:06.900076 4791 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="c1d7d5b0-bf26-4221-9933-1f2af688750f" Dec 08 21:21:06 crc kubenswrapper[4791]: I1208 21:21:06.900795 4791 status_manager.go:851] "Failed to get status for pod" podUID="31175fa6-cfc1-4032-af15-d8f96bfc29c9" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.173:6443: connect: connection refused" Dec 08 21:21:06 crc kubenswrapper[4791]: E1208 21:21:06.900934 4791 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.173:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:21:07 crc kubenswrapper[4791]: I1208 21:21:07.913491 4791 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="d12160322fa82a6ce280aaaf2d763dd7cdf5bc579fec8218b5c3342feed6886d" exitCode=0 Dec 08 21:21:07 crc kubenswrapper[4791]: I1208 21:21:07.913582 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"d12160322fa82a6ce280aaaf2d763dd7cdf5bc579fec8218b5c3342feed6886d"} Dec 08 21:21:07 crc kubenswrapper[4791]: I1208 21:21:07.913926 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"44f5941312e7b75066a060b3741ab4de2a7492692a8c35c08f1a4ff24cfc41a2"} Dec 08 21:21:07 crc kubenswrapper[4791]: I1208 21:21:07.913955 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"bf488339ef9181cd7185178a9b711ff491d58c0d6eea6f6ed124cca7e013f3f3"} Dec 08 21:21:07 crc kubenswrapper[4791]: I1208 21:21:07.913971 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"afce32a71f02f58ee708175011475b68f668cbdc4fd7c5a7066237db82b811a7"} Dec 08 21:21:07 crc kubenswrapper[4791]: I1208 21:21:07.913983 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"5480b9955c025b9a8a8d283055daa0cc991a06825a70be0c0b1de0cb07a6d80e"} Dec 08 21:21:08 crc kubenswrapper[4791]: I1208 21:21:08.922658 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"75dd6bcc9ca286f8fa738e9214488474d62f2c0aef6093d12fd8ecb449647ec0"} Dec 08 21:21:08 crc kubenswrapper[4791]: I1208 21:21:08.923636 4791 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="c1d7d5b0-bf26-4221-9933-1f2af688750f" Dec 08 21:21:08 crc kubenswrapper[4791]: I1208 21:21:08.923747 4791 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="c1d7d5b0-bf26-4221-9933-1f2af688750f" Dec 08 21:21:08 crc kubenswrapper[4791]: I1208 21:21:08.926124 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 08 21:21:08 crc kubenswrapper[4791]: I1208 21:21:08.926180 4791 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="b1bfa0139619e85c94a473c25d6f7163c2bd700b172b828d7decfc3a14baf4b4" exitCode=1 Dec 08 21:21:08 crc kubenswrapper[4791]: I1208 21:21:08.926212 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"b1bfa0139619e85c94a473c25d6f7163c2bd700b172b828d7decfc3a14baf4b4"} Dec 08 21:21:08 crc kubenswrapper[4791]: I1208 21:21:08.926631 4791 scope.go:117] "RemoveContainer" containerID="b1bfa0139619e85c94a473c25d6f7163c2bd700b172b828d7decfc3a14baf4b4" Dec 08 21:21:10 crc kubenswrapper[4791]: I1208 21:21:10.473979 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 21:21:10 crc kubenswrapper[4791]: I1208 21:21:10.943453 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 08 21:21:10 crc kubenswrapper[4791]: I1208 21:21:10.943815 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"adeb3dc3df779f58a71562e158bcfa67265b5d0fd35594078cdd4a51584cc61f"} Dec 08 21:21:11 crc kubenswrapper[4791]: I1208 21:21:11.262973 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 21:21:11 crc kubenswrapper[4791]: I1208 21:21:11.267866 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 21:21:11 crc kubenswrapper[4791]: I1208 21:21:11.614612 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:21:11 crc kubenswrapper[4791]: I1208 21:21:11.614671 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:21:11 crc kubenswrapper[4791]: I1208 21:21:11.620996 4791 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:21:11 crc kubenswrapper[4791]: I1208 21:21:11.950458 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.355121 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" podUID="ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9" containerName="oauth-openshift" containerID="cri-o://72345cdd21b6de519335e1b13c5462fe60ece4ad7347310d0c93eada4c596d25" gracePeriod=15 Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.758508 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.871904 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-user-template-login\") pod \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.872254 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-system-trusted-ca-bundle\") pod \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.872274 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-user-template-error\") pod \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.872898 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9" (UID: "ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.872987 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-system-session\") pod \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.873020 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-system-router-certs\") pod \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.873435 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-system-serving-cert\") pod \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.873480 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-user-template-provider-selection\") pod \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.873503 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lnrth\" (UniqueName: \"kubernetes.io/projected/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-kube-api-access-lnrth\") pod \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.873558 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-system-ocp-branding-template\") pod \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.873625 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-system-service-ca\") pod \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.873660 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-user-idp-0-file-data\") pod \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.873683 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-system-cliconfig\") pod \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " Dec 08 21:21:13 
crc kubenswrapper[4791]: I1208 21:21:13.873725 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-audit-policies\") pod \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.873746 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-audit-dir\") pod \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\" (UID: \"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9\") " Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.874089 4791 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.874138 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9" (UID: "ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.875608 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9" (UID: "ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.875685 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9" (UID: "ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.875798 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9" (UID: "ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.879590 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9" (UID: "ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9"). InnerVolumeSpecName "v4-0-config-system-session". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.879683 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-kube-api-access-lnrth" (OuterVolumeSpecName: "kube-api-access-lnrth") pod "ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9" (UID: "ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9"). InnerVolumeSpecName "kube-api-access-lnrth". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.884410 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9" (UID: "ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.891307 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9" (UID: "ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.891505 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9" (UID: "ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.891858 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9" (UID: "ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.893870 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9" (UID: "ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.894252 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9" (UID: "ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9"). InnerVolumeSpecName "v4-0-config-user-template-error". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.894312 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9" (UID: "ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.942515 4791 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.962152 4791 generic.go:334] "Generic (PLEG): container finished" podID="ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9" containerID="72345cdd21b6de519335e1b13c5462fe60ece4ad7347310d0c93eada4c596d25" exitCode=0 Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.962336 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" event={"ID":"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9","Type":"ContainerDied","Data":"72345cdd21b6de519335e1b13c5462fe60ece4ad7347310d0c93eada4c596d25"} Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.962363 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" event={"ID":"ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9","Type":"ContainerDied","Data":"092b6f4799b7d04a5e3e2bb92ad8083044c658f3b3f26a76f330a9fa255f5cbb"} Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.962380 4791 scope.go:117] "RemoveContainer" containerID="72345cdd21b6de519335e1b13c5462fe60ece4ad7347310d0c93eada4c596d25" Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.968790 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-zll8d" Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.968871 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.968923 4791 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="c1d7d5b0-bf26-4221-9933-1f2af688750f" Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.968968 4791 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="c1d7d5b0-bf26-4221-9933-1f2af688750f" Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.972551 4791 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="55bf3374-2d4a-414b-9fce-0a93509a3bc0" Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.974932 4791 status_manager.go:308] "Container readiness changed before pod has synced" pod="openshift-kube-apiserver/kube-apiserver-crc" containerID="cri-o://5480b9955c025b9a8a8d283055daa0cc991a06825a70be0c0b1de0cb07a6d80e" Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.974976 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.974939 4791 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.975054 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lnrth\" (UniqueName: \"kubernetes.io/projected/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-kube-api-access-lnrth\") on node \"crc\" DevicePath \"\"" Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.975069 4791 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.975084 4791 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.975101 4791 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.975116 4791 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.975130 4791 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 08 21:21:13 crc 
kubenswrapper[4791]: I1208 21:21:13.975144 4791 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.975157 4791 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.975168 4791 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.975180 4791 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.975193 4791 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.975204 4791 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.981615 4791 scope.go:117] "RemoveContainer" containerID="72345cdd21b6de519335e1b13c5462fe60ece4ad7347310d0c93eada4c596d25" Dec 08 21:21:13 crc kubenswrapper[4791]: E1208 21:21:13.982080 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"72345cdd21b6de519335e1b13c5462fe60ece4ad7347310d0c93eada4c596d25\": container with ID starting with 72345cdd21b6de519335e1b13c5462fe60ece4ad7347310d0c93eada4c596d25 not found: ID does not exist" containerID="72345cdd21b6de519335e1b13c5462fe60ece4ad7347310d0c93eada4c596d25" Dec 08 21:21:13 crc kubenswrapper[4791]: I1208 21:21:13.982115 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72345cdd21b6de519335e1b13c5462fe60ece4ad7347310d0c93eada4c596d25"} err="failed to get container status \"72345cdd21b6de519335e1b13c5462fe60ece4ad7347310d0c93eada4c596d25\": rpc error: code = NotFound desc = could not find container \"72345cdd21b6de519335e1b13c5462fe60ece4ad7347310d0c93eada4c596d25\": container with ID starting with 72345cdd21b6de519335e1b13c5462fe60ece4ad7347310d0c93eada4c596d25 not found: ID does not exist" Dec 08 21:21:14 crc kubenswrapper[4791]: I1208 21:21:14.968334 4791 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="c1d7d5b0-bf26-4221-9933-1f2af688750f" Dec 08 21:21:14 crc kubenswrapper[4791]: I1208 21:21:14.968438 4791 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="c1d7d5b0-bf26-4221-9933-1f2af688750f" Dec 08 21:21:14 crc kubenswrapper[4791]: I1208 21:21:14.971663 4791 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" 
pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="55bf3374-2d4a-414b-9fce-0a93509a3bc0" Dec 08 21:21:15 crc kubenswrapper[4791]: E1208 21:21:15.053070 4791 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: unknown (get configmaps)" logger="UnhandledError" Dec 08 21:21:15 crc kubenswrapper[4791]: I1208 21:21:15.974050 4791 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="c1d7d5b0-bf26-4221-9933-1f2af688750f" Dec 08 21:21:15 crc kubenswrapper[4791]: I1208 21:21:15.974545 4791 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="c1d7d5b0-bf26-4221-9933-1f2af688750f" Dec 08 21:21:15 crc kubenswrapper[4791]: I1208 21:21:15.977591 4791 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="55bf3374-2d4a-414b-9fce-0a93509a3bc0" Dec 08 21:21:20 crc kubenswrapper[4791]: I1208 21:21:20.478699 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 21:21:24 crc kubenswrapper[4791]: I1208 21:21:24.098592 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 08 21:21:24 crc kubenswrapper[4791]: I1208 21:21:24.188572 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 08 21:21:24 crc kubenswrapper[4791]: I1208 21:21:24.379373 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 08 21:21:25 crc kubenswrapper[4791]: I1208 21:21:25.014598 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 08 21:21:25 crc kubenswrapper[4791]: I1208 21:21:25.193331 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 08 21:21:25 crc kubenswrapper[4791]: I1208 21:21:25.256180 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 08 21:21:25 crc kubenswrapper[4791]: I1208 21:21:25.504319 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 08 21:21:25 crc kubenswrapper[4791]: I1208 21:21:25.776551 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 08 21:21:25 crc kubenswrapper[4791]: I1208 21:21:25.890172 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 08 21:21:25 crc kubenswrapper[4791]: I1208 21:21:25.929670 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 08 21:21:26 crc kubenswrapper[4791]: I1208 21:21:26.091054 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 08 21:21:26 crc kubenswrapper[4791]: I1208 21:21:26.167551 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 08 
21:21:26 crc kubenswrapper[4791]: I1208 21:21:26.204104 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 08 21:21:26 crc kubenswrapper[4791]: I1208 21:21:26.276310 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 08 21:21:26 crc kubenswrapper[4791]: I1208 21:21:26.487029 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 08 21:21:26 crc kubenswrapper[4791]: I1208 21:21:26.562317 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 08 21:21:26 crc kubenswrapper[4791]: I1208 21:21:26.563723 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 08 21:21:26 crc kubenswrapper[4791]: I1208 21:21:26.654434 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 08 21:21:26 crc kubenswrapper[4791]: I1208 21:21:26.699469 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 08 21:21:26 crc kubenswrapper[4791]: I1208 21:21:26.931360 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 08 21:21:26 crc kubenswrapper[4791]: I1208 21:21:26.962260 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 08 21:21:26 crc kubenswrapper[4791]: I1208 21:21:26.988173 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 08 21:21:26 crc kubenswrapper[4791]: I1208 21:21:26.990091 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 08 21:21:27 crc kubenswrapper[4791]: I1208 21:21:27.195352 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 08 21:21:27 crc kubenswrapper[4791]: I1208 21:21:27.472286 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 08 21:21:27 crc kubenswrapper[4791]: I1208 21:21:27.505034 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 08 21:21:27 crc kubenswrapper[4791]: I1208 21:21:27.529142 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 08 21:21:27 crc kubenswrapper[4791]: I1208 21:21:27.666750 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 08 21:21:27 crc kubenswrapper[4791]: I1208 21:21:27.682154 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 08 21:21:27 crc kubenswrapper[4791]: I1208 21:21:27.720380 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 08 21:21:27 crc kubenswrapper[4791]: I1208 21:21:27.777112 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 08 21:21:27 crc kubenswrapper[4791]: I1208 21:21:27.822189 4791 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 08 21:21:27 crc kubenswrapper[4791]: I1208 21:21:27.839992 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 08 21:21:27 crc kubenswrapper[4791]: I1208 21:21:27.868866 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 08 21:21:27 crc kubenswrapper[4791]: I1208 21:21:27.871089 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 08 21:21:27 crc kubenswrapper[4791]: I1208 21:21:27.901826 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 08 21:21:28 crc kubenswrapper[4791]: I1208 21:21:28.066086 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 08 21:21:28 crc kubenswrapper[4791]: I1208 21:21:28.149209 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 08 21:21:28 crc kubenswrapper[4791]: I1208 21:21:28.207742 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 08 21:21:28 crc kubenswrapper[4791]: I1208 21:21:28.238605 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 08 21:21:28 crc kubenswrapper[4791]: I1208 21:21:28.244725 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 08 21:21:28 crc kubenswrapper[4791]: I1208 21:21:28.399413 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 08 21:21:28 crc kubenswrapper[4791]: I1208 21:21:28.553423 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 08 21:21:28 crc kubenswrapper[4791]: I1208 21:21:28.604477 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 08 21:21:28 crc kubenswrapper[4791]: I1208 21:21:28.615366 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 08 21:21:28 crc kubenswrapper[4791]: I1208 21:21:28.622568 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 08 21:21:28 crc kubenswrapper[4791]: I1208 21:21:28.784760 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 08 21:21:28 crc kubenswrapper[4791]: I1208 21:21:28.802907 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 08 21:21:28 crc kubenswrapper[4791]: I1208 21:21:28.850497 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 08 21:21:28 crc kubenswrapper[4791]: I1208 21:21:28.906648 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 08 21:21:28 crc kubenswrapper[4791]: I1208 21:21:28.945405 4791 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 08 21:21:29 crc kubenswrapper[4791]: I1208 21:21:29.064895 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 08 21:21:29 crc kubenswrapper[4791]: I1208 21:21:29.144430 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 08 21:21:29 crc kubenswrapper[4791]: I1208 21:21:29.361631 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 08 21:21:29 crc kubenswrapper[4791]: I1208 21:21:29.385594 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 08 21:21:29 crc kubenswrapper[4791]: I1208 21:21:29.442179 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 08 21:21:29 crc kubenswrapper[4791]: I1208 21:21:29.462343 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 08 21:21:29 crc kubenswrapper[4791]: I1208 21:21:29.551113 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 08 21:21:29 crc kubenswrapper[4791]: I1208 21:21:29.566040 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 08 21:21:29 crc kubenswrapper[4791]: I1208 21:21:29.613786 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 08 21:21:29 crc kubenswrapper[4791]: I1208 21:21:29.671106 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 08 21:21:29 crc kubenswrapper[4791]: I1208 21:21:29.699435 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 08 21:21:29 crc kubenswrapper[4791]: I1208 21:21:29.783644 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 08 21:21:29 crc kubenswrapper[4791]: I1208 21:21:29.835668 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 08 21:21:29 crc kubenswrapper[4791]: I1208 21:21:29.837327 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 08 21:21:29 crc kubenswrapper[4791]: I1208 21:21:29.904799 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 08 21:21:30 crc kubenswrapper[4791]: I1208 21:21:30.005409 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 08 21:21:30 crc kubenswrapper[4791]: I1208 21:21:30.008312 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 08 21:21:30 crc kubenswrapper[4791]: I1208 21:21:30.045851 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 08 21:21:30 crc kubenswrapper[4791]: I1208 
21:21:30.100860 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 08 21:21:30 crc kubenswrapper[4791]: I1208 21:21:30.315119 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 08 21:21:30 crc kubenswrapper[4791]: I1208 21:21:30.386758 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 08 21:21:30 crc kubenswrapper[4791]: I1208 21:21:30.432935 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 08 21:21:30 crc kubenswrapper[4791]: I1208 21:21:30.461986 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 08 21:21:30 crc kubenswrapper[4791]: I1208 21:21:30.582796 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 08 21:21:30 crc kubenswrapper[4791]: I1208 21:21:30.634280 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 08 21:21:30 crc kubenswrapper[4791]: I1208 21:21:30.683545 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 08 21:21:30 crc kubenswrapper[4791]: I1208 21:21:30.746726 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 08 21:21:30 crc kubenswrapper[4791]: I1208 21:21:30.761207 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 08 21:21:30 crc kubenswrapper[4791]: I1208 21:21:30.901010 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 08 21:21:30 crc kubenswrapper[4791]: I1208 21:21:30.983353 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 08 21:21:30 crc kubenswrapper[4791]: I1208 21:21:30.988120 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 08 21:21:31 crc kubenswrapper[4791]: I1208 21:21:31.005038 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 08 21:21:31 crc kubenswrapper[4791]: I1208 21:21:31.143027 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 08 21:21:31 crc kubenswrapper[4791]: I1208 21:21:31.161692 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 08 21:21:31 crc kubenswrapper[4791]: I1208 21:21:31.161692 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 08 21:21:31 crc kubenswrapper[4791]: I1208 21:21:31.224409 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 08 21:21:31 crc kubenswrapper[4791]: I1208 21:21:31.262308 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 08 21:21:31 crc 
kubenswrapper[4791]: I1208 21:21:31.315847 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 08 21:21:31 crc kubenswrapper[4791]: I1208 21:21:31.422695 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 08 21:21:31 crc kubenswrapper[4791]: I1208 21:21:31.450634 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 08 21:21:31 crc kubenswrapper[4791]: I1208 21:21:31.487263 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 08 21:21:31 crc kubenswrapper[4791]: I1208 21:21:31.498224 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 08 21:21:31 crc kubenswrapper[4791]: I1208 21:21:31.521554 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 08 21:21:31 crc kubenswrapper[4791]: I1208 21:21:31.522298 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 08 21:21:31 crc kubenswrapper[4791]: I1208 21:21:31.565004 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 08 21:21:31 crc kubenswrapper[4791]: I1208 21:21:31.639113 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 08 21:21:31 crc kubenswrapper[4791]: I1208 21:21:31.742436 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 08 21:21:31 crc kubenswrapper[4791]: I1208 21:21:31.835782 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 08 21:21:31 crc kubenswrapper[4791]: I1208 21:21:31.901299 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 08 21:21:32 crc kubenswrapper[4791]: I1208 21:21:32.417154 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 08 21:21:32 crc kubenswrapper[4791]: I1208 21:21:32.423268 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 08 21:21:32 crc kubenswrapper[4791]: I1208 21:21:32.428128 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 08 21:21:32 crc kubenswrapper[4791]: I1208 21:21:32.496578 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 08 21:21:32 crc kubenswrapper[4791]: I1208 21:21:32.534915 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 08 21:21:32 crc kubenswrapper[4791]: I1208 21:21:32.540050 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 08 21:21:32 crc kubenswrapper[4791]: I1208 21:21:32.588107 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 08 21:21:32 
crc kubenswrapper[4791]: I1208 21:21:32.727566 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 08 21:21:32 crc kubenswrapper[4791]: I1208 21:21:32.758340 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 08 21:21:32 crc kubenswrapper[4791]: I1208 21:21:32.776402 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 08 21:21:32 crc kubenswrapper[4791]: I1208 21:21:32.989103 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 08 21:21:33 crc kubenswrapper[4791]: I1208 21:21:33.007999 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 08 21:21:33 crc kubenswrapper[4791]: I1208 21:21:33.080881 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 08 21:21:33 crc kubenswrapper[4791]: I1208 21:21:33.105798 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 08 21:21:33 crc kubenswrapper[4791]: I1208 21:21:33.203957 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 08 21:21:33 crc kubenswrapper[4791]: I1208 21:21:33.394834 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 08 21:21:33 crc kubenswrapper[4791]: I1208 21:21:33.404590 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 08 21:21:33 crc kubenswrapper[4791]: I1208 21:21:33.442733 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 08 21:21:33 crc kubenswrapper[4791]: I1208 21:21:33.567232 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 08 21:21:33 crc kubenswrapper[4791]: I1208 21:21:33.577067 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 08 21:21:33 crc kubenswrapper[4791]: I1208 21:21:33.629602 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 08 21:21:33 crc kubenswrapper[4791]: I1208 21:21:33.634146 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 08 21:21:33 crc kubenswrapper[4791]: I1208 21:21:33.682914 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 08 21:21:33 crc kubenswrapper[4791]: I1208 21:21:33.729378 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 08 21:21:33 crc kubenswrapper[4791]: I1208 21:21:33.768563 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 08 21:21:33 crc kubenswrapper[4791]: I1208 21:21:33.865821 4791 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 08 21:21:33 crc kubenswrapper[4791]: I1208 21:21:33.916591 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 08 21:21:34 crc kubenswrapper[4791]: I1208 21:21:34.030088 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 08 21:21:34 crc kubenswrapper[4791]: I1208 21:21:34.059517 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 08 21:21:34 crc kubenswrapper[4791]: I1208 21:21:34.149391 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 08 21:21:34 crc kubenswrapper[4791]: I1208 21:21:34.226501 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 08 21:21:34 crc kubenswrapper[4791]: I1208 21:21:34.311731 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 08 21:21:34 crc kubenswrapper[4791]: I1208 21:21:34.357974 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 08 21:21:34 crc kubenswrapper[4791]: I1208 21:21:34.391605 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 08 21:21:34 crc kubenswrapper[4791]: I1208 21:21:34.517310 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 08 21:21:34 crc kubenswrapper[4791]: I1208 21:21:34.524116 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 08 21:21:34 crc kubenswrapper[4791]: I1208 21:21:34.568674 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 08 21:21:34 crc kubenswrapper[4791]: I1208 21:21:34.592489 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 08 21:21:34 crc kubenswrapper[4791]: I1208 21:21:34.665314 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 08 21:21:34 crc kubenswrapper[4791]: I1208 21:21:34.758141 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 08 21:21:34 crc kubenswrapper[4791]: I1208 21:21:34.827790 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 08 21:21:34 crc kubenswrapper[4791]: I1208 21:21:34.871626 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 08 21:21:34 crc kubenswrapper[4791]: I1208 21:21:34.890480 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 08 21:21:34 crc kubenswrapper[4791]: I1208 21:21:34.950220 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 08 21:21:34 crc kubenswrapper[4791]: I1208 21:21:34.951929 4791 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 08 21:21:34 crc kubenswrapper[4791]: I1208 21:21:34.979565 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 08 21:21:35 crc kubenswrapper[4791]: I1208 21:21:35.006233 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 08 21:21:35 crc kubenswrapper[4791]: I1208 21:21:35.029953 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 08 21:21:35 crc kubenswrapper[4791]: I1208 21:21:35.041431 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 08 21:21:35 crc kubenswrapper[4791]: I1208 21:21:35.085695 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 08 21:21:35 crc kubenswrapper[4791]: I1208 21:21:35.220910 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 08 21:21:35 crc kubenswrapper[4791]: I1208 21:21:35.252103 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:21:35 crc kubenswrapper[4791]: I1208 21:21:35.252191 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:21:35 crc kubenswrapper[4791]: I1208 21:21:35.256964 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 08 21:21:35 crc kubenswrapper[4791]: I1208 21:21:35.309132 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 08 21:21:35 crc kubenswrapper[4791]: I1208 21:21:35.367843 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 08 21:21:35 crc kubenswrapper[4791]: I1208 21:21:35.423575 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 08 21:21:35 crc kubenswrapper[4791]: I1208 21:21:35.462426 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 08 21:21:35 crc kubenswrapper[4791]: I1208 21:21:35.484273 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 08 21:21:35 crc kubenswrapper[4791]: I1208 21:21:35.506912 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 08 21:21:35 crc kubenswrapper[4791]: I1208 21:21:35.621538 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 
08 21:21:35 crc kubenswrapper[4791]: I1208 21:21:35.641539 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 08 21:21:35 crc kubenswrapper[4791]: I1208 21:21:35.658735 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 08 21:21:35 crc kubenswrapper[4791]: I1208 21:21:35.688081 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 08 21:21:35 crc kubenswrapper[4791]: I1208 21:21:35.757972 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 08 21:21:35 crc kubenswrapper[4791]: I1208 21:21:35.790035 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 08 21:21:35 crc kubenswrapper[4791]: I1208 21:21:35.833297 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 08 21:21:35 crc kubenswrapper[4791]: I1208 21:21:35.837431 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 08 21:21:35 crc kubenswrapper[4791]: I1208 21:21:35.999858 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 08 21:21:36 crc kubenswrapper[4791]: I1208 21:21:36.003755 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 08 21:21:36 crc kubenswrapper[4791]: I1208 21:21:36.042502 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 08 21:21:36 crc kubenswrapper[4791]: I1208 21:21:36.082109 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 08 21:21:36 crc kubenswrapper[4791]: I1208 21:21:36.098200 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 08 21:21:36 crc kubenswrapper[4791]: I1208 21:21:36.111074 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 08 21:21:36 crc kubenswrapper[4791]: I1208 21:21:36.167399 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 08 21:21:36 crc kubenswrapper[4791]: I1208 21:21:36.201514 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 08 21:21:36 crc kubenswrapper[4791]: I1208 21:21:36.316962 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 08 21:21:36 crc kubenswrapper[4791]: I1208 21:21:36.346627 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 08 21:21:36 crc kubenswrapper[4791]: I1208 21:21:36.395243 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 08 21:21:36 crc kubenswrapper[4791]: I1208 21:21:36.476924 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 08 21:21:36 crc kubenswrapper[4791]: I1208 
21:21:36.731076 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 08 21:21:36 crc kubenswrapper[4791]: I1208 21:21:36.742156 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 08 21:21:36 crc kubenswrapper[4791]: I1208 21:21:36.990025 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 08 21:21:37 crc kubenswrapper[4791]: I1208 21:21:37.018330 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 08 21:21:37 crc kubenswrapper[4791]: I1208 21:21:37.123410 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 08 21:21:37 crc kubenswrapper[4791]: I1208 21:21:37.182429 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 08 21:21:37 crc kubenswrapper[4791]: I1208 21:21:37.235465 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 08 21:21:37 crc kubenswrapper[4791]: I1208 21:21:37.247068 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 08 21:21:37 crc kubenswrapper[4791]: I1208 21:21:37.484112 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 08 21:21:37 crc kubenswrapper[4791]: I1208 21:21:37.501343 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 08 21:21:37 crc kubenswrapper[4791]: I1208 21:21:37.554925 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 08 21:21:37 crc kubenswrapper[4791]: I1208 21:21:37.584330 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 08 21:21:37 crc kubenswrapper[4791]: I1208 21:21:37.674923 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 08 21:21:37 crc kubenswrapper[4791]: I1208 21:21:37.709978 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 08 21:21:37 crc kubenswrapper[4791]: I1208 21:21:37.728985 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 08 21:21:37 crc kubenswrapper[4791]: I1208 21:21:37.778463 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 08 21:21:37 crc kubenswrapper[4791]: I1208 21:21:37.914458 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 08 21:21:37 crc kubenswrapper[4791]: I1208 21:21:37.925570 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.048013 4791 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.138066 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.180629 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.580806 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.607886 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.714801 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.723005 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.727230 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.807822 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.839642 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.856968 4791 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.866677 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.867462 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-zll8d","openshift-kube-apiserver/kube-apiserver-crc"] Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.867585 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-authentication/oauth-openshift-54b5875b97-xzbc6"] Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.868028 4791 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="c1d7d5b0-bf26-4221-9933-1f2af688750f" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.868063 4791 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="c1d7d5b0-bf26-4221-9933-1f2af688750f" Dec 08 21:21:38 crc kubenswrapper[4791]: E1208 21:21:38.869133 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9" containerName="oauth-openshift" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.869162 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9" containerName="oauth-openshift" Dec 08 21:21:38 crc kubenswrapper[4791]: E1208 21:21:38.869184 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31175fa6-cfc1-4032-af15-d8f96bfc29c9" containerName="installer" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 
21:21:38.869192 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="31175fa6-cfc1-4032-af15-d8f96bfc29c9" containerName="installer" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.869330 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9" containerName="oauth-openshift" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.869343 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="31175fa6-cfc1-4032-af15-d8f96bfc29c9" containerName="installer" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.869889 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.876348 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.876552 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.876596 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.876620 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.876650 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.876677 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.876775 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.876886 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.877473 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.877654 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.877600 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.878297 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.878630 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.882884 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.888947 4791 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.896130 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=25.896105189 podStartE2EDuration="25.896105189s" podCreationTimestamp="2025-12-08 21:21:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:21:38.895993076 +0000 UTC m=+175.594751491" watchObservedRunningTime="2025-12-08 21:21:38.896105189 +0000 UTC m=+175.594863544" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.898567 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.910298 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.984090 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/5042c73f-b6a1-4935-8652-be87dbe7ac3b-v4-0-config-user-template-login\") pod \"oauth-openshift-54b5875b97-xzbc6\" (UID: \"5042c73f-b6a1-4935-8652-be87dbe7ac3b\") " pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.984137 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/5042c73f-b6a1-4935-8652-be87dbe7ac3b-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-54b5875b97-xzbc6\" (UID: \"5042c73f-b6a1-4935-8652-be87dbe7ac3b\") " pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.984163 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/5042c73f-b6a1-4935-8652-be87dbe7ac3b-v4-0-config-user-template-error\") pod \"oauth-openshift-54b5875b97-xzbc6\" (UID: \"5042c73f-b6a1-4935-8652-be87dbe7ac3b\") " pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.984184 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/5042c73f-b6a1-4935-8652-be87dbe7ac3b-v4-0-config-system-session\") pod \"oauth-openshift-54b5875b97-xzbc6\" (UID: \"5042c73f-b6a1-4935-8652-be87dbe7ac3b\") " pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.984212 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5042c73f-b6a1-4935-8652-be87dbe7ac3b-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-54b5875b97-xzbc6\" (UID: \"5042c73f-b6a1-4935-8652-be87dbe7ac3b\") " pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.984263 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/5042c73f-b6a1-4935-8652-be87dbe7ac3b-v4-0-config-system-serving-cert\") pod \"oauth-openshift-54b5875b97-xzbc6\" (UID: \"5042c73f-b6a1-4935-8652-be87dbe7ac3b\") " pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.984282 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/5042c73f-b6a1-4935-8652-be87dbe7ac3b-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-54b5875b97-xzbc6\" (UID: \"5042c73f-b6a1-4935-8652-be87dbe7ac3b\") " pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.984312 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/5042c73f-b6a1-4935-8652-be87dbe7ac3b-v4-0-config-system-service-ca\") pod \"oauth-openshift-54b5875b97-xzbc6\" (UID: \"5042c73f-b6a1-4935-8652-be87dbe7ac3b\") " pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.984333 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/5042c73f-b6a1-4935-8652-be87dbe7ac3b-v4-0-config-system-router-certs\") pod \"oauth-openshift-54b5875b97-xzbc6\" (UID: \"5042c73f-b6a1-4935-8652-be87dbe7ac3b\") " pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.984356 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/5042c73f-b6a1-4935-8652-be87dbe7ac3b-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-54b5875b97-xzbc6\" (UID: \"5042c73f-b6a1-4935-8652-be87dbe7ac3b\") " pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.984375 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/5042c73f-b6a1-4935-8652-be87dbe7ac3b-audit-policies\") pod \"oauth-openshift-54b5875b97-xzbc6\" (UID: \"5042c73f-b6a1-4935-8652-be87dbe7ac3b\") " pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.984397 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bl7n6\" (UniqueName: \"kubernetes.io/projected/5042c73f-b6a1-4935-8652-be87dbe7ac3b-kube-api-access-bl7n6\") pod \"oauth-openshift-54b5875b97-xzbc6\" (UID: \"5042c73f-b6a1-4935-8652-be87dbe7ac3b\") " pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" Dec 08 21:21:38 crc kubenswrapper[4791]: I1208 21:21:38.984417 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/5042c73f-b6a1-4935-8652-be87dbe7ac3b-v4-0-config-system-cliconfig\") pod \"oauth-openshift-54b5875b97-xzbc6\" (UID: \"5042c73f-b6a1-4935-8652-be87dbe7ac3b\") " pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" Dec 08 21:21:38 crc kubenswrapper[4791]: 
I1208 21:21:38.984437 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5042c73f-b6a1-4935-8652-be87dbe7ac3b-audit-dir\") pod \"oauth-openshift-54b5875b97-xzbc6\" (UID: \"5042c73f-b6a1-4935-8652-be87dbe7ac3b\") " pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" Dec 08 21:21:39 crc kubenswrapper[4791]: I1208 21:21:39.027852 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 08 21:21:39 crc kubenswrapper[4791]: I1208 21:21:39.085109 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5042c73f-b6a1-4935-8652-be87dbe7ac3b-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-54b5875b97-xzbc6\" (UID: \"5042c73f-b6a1-4935-8652-be87dbe7ac3b\") " pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" Dec 08 21:21:39 crc kubenswrapper[4791]: I1208 21:21:39.085163 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/5042c73f-b6a1-4935-8652-be87dbe7ac3b-v4-0-config-system-serving-cert\") pod \"oauth-openshift-54b5875b97-xzbc6\" (UID: \"5042c73f-b6a1-4935-8652-be87dbe7ac3b\") " pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" Dec 08 21:21:39 crc kubenswrapper[4791]: I1208 21:21:39.085188 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/5042c73f-b6a1-4935-8652-be87dbe7ac3b-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-54b5875b97-xzbc6\" (UID: \"5042c73f-b6a1-4935-8652-be87dbe7ac3b\") " pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" Dec 08 21:21:39 crc kubenswrapper[4791]: I1208 21:21:39.085221 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/5042c73f-b6a1-4935-8652-be87dbe7ac3b-v4-0-config-system-service-ca\") pod \"oauth-openshift-54b5875b97-xzbc6\" (UID: \"5042c73f-b6a1-4935-8652-be87dbe7ac3b\") " pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" Dec 08 21:21:39 crc kubenswrapper[4791]: I1208 21:21:39.085237 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/5042c73f-b6a1-4935-8652-be87dbe7ac3b-v4-0-config-system-router-certs\") pod \"oauth-openshift-54b5875b97-xzbc6\" (UID: \"5042c73f-b6a1-4935-8652-be87dbe7ac3b\") " pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" Dec 08 21:21:39 crc kubenswrapper[4791]: I1208 21:21:39.085261 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/5042c73f-b6a1-4935-8652-be87dbe7ac3b-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-54b5875b97-xzbc6\" (UID: \"5042c73f-b6a1-4935-8652-be87dbe7ac3b\") " pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" Dec 08 21:21:39 crc kubenswrapper[4791]: I1208 21:21:39.085285 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/5042c73f-b6a1-4935-8652-be87dbe7ac3b-audit-policies\") pod 
\"oauth-openshift-54b5875b97-xzbc6\" (UID: \"5042c73f-b6a1-4935-8652-be87dbe7ac3b\") " pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" Dec 08 21:21:39 crc kubenswrapper[4791]: I1208 21:21:39.085308 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bl7n6\" (UniqueName: \"kubernetes.io/projected/5042c73f-b6a1-4935-8652-be87dbe7ac3b-kube-api-access-bl7n6\") pod \"oauth-openshift-54b5875b97-xzbc6\" (UID: \"5042c73f-b6a1-4935-8652-be87dbe7ac3b\") " pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" Dec 08 21:21:39 crc kubenswrapper[4791]: I1208 21:21:39.085332 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/5042c73f-b6a1-4935-8652-be87dbe7ac3b-v4-0-config-system-cliconfig\") pod \"oauth-openshift-54b5875b97-xzbc6\" (UID: \"5042c73f-b6a1-4935-8652-be87dbe7ac3b\") " pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" Dec 08 21:21:39 crc kubenswrapper[4791]: I1208 21:21:39.085351 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5042c73f-b6a1-4935-8652-be87dbe7ac3b-audit-dir\") pod \"oauth-openshift-54b5875b97-xzbc6\" (UID: \"5042c73f-b6a1-4935-8652-be87dbe7ac3b\") " pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" Dec 08 21:21:39 crc kubenswrapper[4791]: I1208 21:21:39.085377 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/5042c73f-b6a1-4935-8652-be87dbe7ac3b-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-54b5875b97-xzbc6\" (UID: \"5042c73f-b6a1-4935-8652-be87dbe7ac3b\") " pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" Dec 08 21:21:39 crc kubenswrapper[4791]: I1208 21:21:39.085395 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/5042c73f-b6a1-4935-8652-be87dbe7ac3b-v4-0-config-user-template-login\") pod \"oauth-openshift-54b5875b97-xzbc6\" (UID: \"5042c73f-b6a1-4935-8652-be87dbe7ac3b\") " pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" Dec 08 21:21:39 crc kubenswrapper[4791]: I1208 21:21:39.085414 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/5042c73f-b6a1-4935-8652-be87dbe7ac3b-v4-0-config-user-template-error\") pod \"oauth-openshift-54b5875b97-xzbc6\" (UID: \"5042c73f-b6a1-4935-8652-be87dbe7ac3b\") " pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" Dec 08 21:21:39 crc kubenswrapper[4791]: I1208 21:21:39.085431 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/5042c73f-b6a1-4935-8652-be87dbe7ac3b-v4-0-config-system-session\") pod \"oauth-openshift-54b5875b97-xzbc6\" (UID: \"5042c73f-b6a1-4935-8652-be87dbe7ac3b\") " pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" Dec 08 21:21:39 crc kubenswrapper[4791]: I1208 21:21:39.085966 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5042c73f-b6a1-4935-8652-be87dbe7ac3b-audit-dir\") pod \"oauth-openshift-54b5875b97-xzbc6\" (UID: \"5042c73f-b6a1-4935-8652-be87dbe7ac3b\") " 
pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" Dec 08 21:21:39 crc kubenswrapper[4791]: I1208 21:21:39.086765 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5042c73f-b6a1-4935-8652-be87dbe7ac3b-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-54b5875b97-xzbc6\" (UID: \"5042c73f-b6a1-4935-8652-be87dbe7ac3b\") " pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" Dec 08 21:21:39 crc kubenswrapper[4791]: I1208 21:21:39.087033 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/5042c73f-b6a1-4935-8652-be87dbe7ac3b-v4-0-config-system-service-ca\") pod \"oauth-openshift-54b5875b97-xzbc6\" (UID: \"5042c73f-b6a1-4935-8652-be87dbe7ac3b\") " pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" Dec 08 21:21:39 crc kubenswrapper[4791]: I1208 21:21:39.087853 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/5042c73f-b6a1-4935-8652-be87dbe7ac3b-v4-0-config-system-cliconfig\") pod \"oauth-openshift-54b5875b97-xzbc6\" (UID: \"5042c73f-b6a1-4935-8652-be87dbe7ac3b\") " pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" Dec 08 21:21:39 crc kubenswrapper[4791]: I1208 21:21:39.089236 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/5042c73f-b6a1-4935-8652-be87dbe7ac3b-audit-policies\") pod \"oauth-openshift-54b5875b97-xzbc6\" (UID: \"5042c73f-b6a1-4935-8652-be87dbe7ac3b\") " pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" Dec 08 21:21:39 crc kubenswrapper[4791]: I1208 21:21:39.092283 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/5042c73f-b6a1-4935-8652-be87dbe7ac3b-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-54b5875b97-xzbc6\" (UID: \"5042c73f-b6a1-4935-8652-be87dbe7ac3b\") " pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" Dec 08 21:21:39 crc kubenswrapper[4791]: I1208 21:21:39.092389 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/5042c73f-b6a1-4935-8652-be87dbe7ac3b-v4-0-config-system-router-certs\") pod \"oauth-openshift-54b5875b97-xzbc6\" (UID: \"5042c73f-b6a1-4935-8652-be87dbe7ac3b\") " pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" Dec 08 21:21:39 crc kubenswrapper[4791]: I1208 21:21:39.092386 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/5042c73f-b6a1-4935-8652-be87dbe7ac3b-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-54b5875b97-xzbc6\" (UID: \"5042c73f-b6a1-4935-8652-be87dbe7ac3b\") " pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" Dec 08 21:21:39 crc kubenswrapper[4791]: I1208 21:21:39.092592 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/5042c73f-b6a1-4935-8652-be87dbe7ac3b-v4-0-config-user-template-login\") pod \"oauth-openshift-54b5875b97-xzbc6\" (UID: \"5042c73f-b6a1-4935-8652-be87dbe7ac3b\") " pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" Dec 
08 21:21:39 crc kubenswrapper[4791]: I1208 21:21:39.093370 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/5042c73f-b6a1-4935-8652-be87dbe7ac3b-v4-0-config-system-session\") pod \"oauth-openshift-54b5875b97-xzbc6\" (UID: \"5042c73f-b6a1-4935-8652-be87dbe7ac3b\") " pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" Dec 08 21:21:39 crc kubenswrapper[4791]: I1208 21:21:39.094634 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/5042c73f-b6a1-4935-8652-be87dbe7ac3b-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-54b5875b97-xzbc6\" (UID: \"5042c73f-b6a1-4935-8652-be87dbe7ac3b\") " pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" Dec 08 21:21:39 crc kubenswrapper[4791]: I1208 21:21:39.096188 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/5042c73f-b6a1-4935-8652-be87dbe7ac3b-v4-0-config-user-template-error\") pod \"oauth-openshift-54b5875b97-xzbc6\" (UID: \"5042c73f-b6a1-4935-8652-be87dbe7ac3b\") " pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" Dec 08 21:21:39 crc kubenswrapper[4791]: I1208 21:21:39.105278 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/5042c73f-b6a1-4935-8652-be87dbe7ac3b-v4-0-config-system-serving-cert\") pod \"oauth-openshift-54b5875b97-xzbc6\" (UID: \"5042c73f-b6a1-4935-8652-be87dbe7ac3b\") " pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" Dec 08 21:21:39 crc kubenswrapper[4791]: I1208 21:21:39.116869 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bl7n6\" (UniqueName: \"kubernetes.io/projected/5042c73f-b6a1-4935-8652-be87dbe7ac3b-kube-api-access-bl7n6\") pod \"oauth-openshift-54b5875b97-xzbc6\" (UID: \"5042c73f-b6a1-4935-8652-be87dbe7ac3b\") " pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" Dec 08 21:21:39 crc kubenswrapper[4791]: I1208 21:21:39.169347 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 08 21:21:39 crc kubenswrapper[4791]: I1208 21:21:39.177243 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 08 21:21:39 crc kubenswrapper[4791]: I1208 21:21:39.207516 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" Dec 08 21:21:39 crc kubenswrapper[4791]: I1208 21:21:39.209687 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 08 21:21:39 crc kubenswrapper[4791]: I1208 21:21:39.315916 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 08 21:21:39 crc kubenswrapper[4791]: I1208 21:21:39.458695 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 08 21:21:39 crc kubenswrapper[4791]: I1208 21:21:39.519579 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 08 21:21:39 crc kubenswrapper[4791]: I1208 21:21:39.610763 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9" path="/var/lib/kubelet/pods/ef2ffb2c-8fdc-42fc-ab7a-220bb7a257e9/volumes" Dec 08 21:21:39 crc kubenswrapper[4791]: I1208 21:21:39.611299 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-54b5875b97-xzbc6"] Dec 08 21:21:39 crc kubenswrapper[4791]: I1208 21:21:39.630183 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 08 21:21:39 crc kubenswrapper[4791]: I1208 21:21:39.658556 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 08 21:21:39 crc kubenswrapper[4791]: I1208 21:21:39.682615 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 08 21:21:39 crc kubenswrapper[4791]: I1208 21:21:39.927205 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 08 21:21:39 crc kubenswrapper[4791]: I1208 21:21:39.933603 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 08 21:21:39 crc kubenswrapper[4791]: I1208 21:21:39.959693 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 08 21:21:40 crc kubenswrapper[4791]: I1208 21:21:40.083959 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 08 21:21:40 crc kubenswrapper[4791]: I1208 21:21:40.105372 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" event={"ID":"5042c73f-b6a1-4935-8652-be87dbe7ac3b","Type":"ContainerStarted","Data":"e430ac48d3c304a1507fa8df27cd72be1be2448a084bbdf5cfd14058a04f1dd5"} Dec 08 21:21:40 crc kubenswrapper[4791]: I1208 21:21:40.108273 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" event={"ID":"5042c73f-b6a1-4935-8652-be87dbe7ac3b","Type":"ContainerStarted","Data":"02250c29c666878c791bca299c90a0199470024d55e101b65d1a99e5e943d72d"} Dec 08 21:21:40 crc kubenswrapper[4791]: I1208 21:21:40.108309 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" Dec 08 21:21:40 crc kubenswrapper[4791]: I1208 21:21:40.132180 4791 pod_startup_latency_tracker.go:104] "Observed 
pod startup duration" pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" podStartSLOduration=52.132160769 podStartE2EDuration="52.132160769s" podCreationTimestamp="2025-12-08 21:20:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:21:40.129047525 +0000 UTC m=+176.827805890" watchObservedRunningTime="2025-12-08 21:21:40.132160769 +0000 UTC m=+176.830919114" Dec 08 21:21:40 crc kubenswrapper[4791]: I1208 21:21:40.133445 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 08 21:21:40 crc kubenswrapper[4791]: I1208 21:21:40.143116 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-54b5875b97-xzbc6" Dec 08 21:21:40 crc kubenswrapper[4791]: I1208 21:21:40.425593 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 08 21:21:40 crc kubenswrapper[4791]: I1208 21:21:40.564372 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 08 21:21:40 crc kubenswrapper[4791]: I1208 21:21:40.919133 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 08 21:21:41 crc kubenswrapper[4791]: I1208 21:21:41.068292 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 08 21:21:41 crc kubenswrapper[4791]: I1208 21:21:41.511902 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 08 21:21:41 crc kubenswrapper[4791]: I1208 21:21:41.523686 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 08 21:21:41 crc kubenswrapper[4791]: I1208 21:21:41.709314 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 08 21:21:41 crc kubenswrapper[4791]: I1208 21:21:41.778903 4791 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 08 21:21:42 crc kubenswrapper[4791]: I1208 21:21:42.001229 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 08 21:21:47 crc kubenswrapper[4791]: I1208 21:21:47.698591 4791 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 08 21:21:47 crc kubenswrapper[4791]: I1208 21:21:47.699335 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://5ab67524ee5c02e932a26ac6ae7a644db172cae53f270f44137fbfc80dbd95c3" gracePeriod=5 Dec 08 21:21:49 crc kubenswrapper[4791]: I1208 21:21:49.439570 4791 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 08 21:21:53 crc kubenswrapper[4791]: I1208 21:21:53.176259 4791 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 08 21:21:53 crc kubenswrapper[4791]: I1208 21:21:53.176735 4791 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="5ab67524ee5c02e932a26ac6ae7a644db172cae53f270f44137fbfc80dbd95c3" exitCode=137 Dec 08 21:21:53 crc kubenswrapper[4791]: I1208 21:21:53.593145 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 08 21:21:53 crc kubenswrapper[4791]: I1208 21:21:53.593265 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 21:21:53 crc kubenswrapper[4791]: I1208 21:21:53.765867 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 08 21:21:53 crc kubenswrapper[4791]: I1208 21:21:53.765926 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 08 21:21:53 crc kubenswrapper[4791]: I1208 21:21:53.765970 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 08 21:21:53 crc kubenswrapper[4791]: I1208 21:21:53.765990 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 08 21:21:53 crc kubenswrapper[4791]: I1208 21:21:53.766056 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 08 21:21:53 crc kubenswrapper[4791]: I1208 21:21:53.766065 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:21:53 crc kubenswrapper[4791]: I1208 21:21:53.766092 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:21:53 crc kubenswrapper[4791]: I1208 21:21:53.766065 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:21:53 crc kubenswrapper[4791]: I1208 21:21:53.766186 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:21:53 crc kubenswrapper[4791]: I1208 21:21:53.766432 4791 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Dec 08 21:21:53 crc kubenswrapper[4791]: I1208 21:21:53.766449 4791 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Dec 08 21:21:53 crc kubenswrapper[4791]: I1208 21:21:53.766461 4791 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Dec 08 21:21:53 crc kubenswrapper[4791]: I1208 21:21:53.766477 4791 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 08 21:21:53 crc kubenswrapper[4791]: I1208 21:21:53.774902 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:21:53 crc kubenswrapper[4791]: I1208 21:21:53.867359 4791 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 08 21:21:54 crc kubenswrapper[4791]: I1208 21:21:54.184926 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 08 21:21:54 crc kubenswrapper[4791]: I1208 21:21:54.185013 4791 scope.go:117] "RemoveContainer" containerID="5ab67524ee5c02e932a26ac6ae7a644db172cae53f270f44137fbfc80dbd95c3" Dec 08 21:21:54 crc kubenswrapper[4791]: I1208 21:21:54.185075 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 21:21:54 crc kubenswrapper[4791]: I1208 21:21:54.771187 4791 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 08 21:21:55 crc kubenswrapper[4791]: I1208 21:21:55.605005 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Dec 08 21:21:58 crc kubenswrapper[4791]: I1208 21:21:58.208493 4791 generic.go:334] "Generic (PLEG): container finished" podID="409596b6-1fa0-416d-b5a3-a06c2e36c15b" containerID="69f994b6dfedf1324c092e64d4ce5841538a24467cc84da9b5b608f55afdc42c" exitCode=0 Dec 08 21:21:58 crc kubenswrapper[4791]: I1208 21:21:58.208591 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-w9zwt" event={"ID":"409596b6-1fa0-416d-b5a3-a06c2e36c15b","Type":"ContainerDied","Data":"69f994b6dfedf1324c092e64d4ce5841538a24467cc84da9b5b608f55afdc42c"} Dec 08 21:21:58 crc kubenswrapper[4791]: I1208 21:21:58.209355 4791 scope.go:117] "RemoveContainer" containerID="69f994b6dfedf1324c092e64d4ce5841538a24467cc84da9b5b608f55afdc42c" Dec 08 21:21:59 crc kubenswrapper[4791]: I1208 21:21:59.215427 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-w9zwt" event={"ID":"409596b6-1fa0-416d-b5a3-a06c2e36c15b","Type":"ContainerStarted","Data":"ae34794ab6b94f309fcdc6e544ada2683cf7c3f329649c2ab87638491e5e8df5"} Dec 08 21:21:59 crc kubenswrapper[4791]: I1208 21:21:59.216037 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-w9zwt" Dec 08 21:21:59 crc kubenswrapper[4791]: I1208 21:21:59.219133 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-w9zwt" Dec 08 21:22:01 crc kubenswrapper[4791]: I1208 21:22:01.710045 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-kn6mg"] Dec 08 21:22:01 crc kubenswrapper[4791]: I1208 21:22:01.710907 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-kn6mg" podUID="8bd7cdf9-9085-4702-8a95-f3f445783066" containerName="controller-manager" containerID="cri-o://780a43dcc765947bfc4191796a7229566d6781e2f3ad9a95182e9d43ea9817a3" gracePeriod=30 Dec 08 21:22:01 crc kubenswrapper[4791]: I1208 21:22:01.808755 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7gsq"] Dec 08 21:22:01 crc kubenswrapper[4791]: I1208 21:22:01.809678 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7gsq" podUID="c705ea37-240a-4fd8-9779-98bff52678ca" containerName="route-controller-manager" containerID="cri-o://9dad24b992be360e45b518706e2c4136d87b2242d2c3a3327176c9ceaae98250" gracePeriod=30 Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.187826 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-kn6mg" Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.195073 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7gsq" Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.237523 4791 generic.go:334] "Generic (PLEG): container finished" podID="8bd7cdf9-9085-4702-8a95-f3f445783066" containerID="780a43dcc765947bfc4191796a7229566d6781e2f3ad9a95182e9d43ea9817a3" exitCode=0 Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.237573 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-kn6mg" Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.237648 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-kn6mg" event={"ID":"8bd7cdf9-9085-4702-8a95-f3f445783066","Type":"ContainerDied","Data":"780a43dcc765947bfc4191796a7229566d6781e2f3ad9a95182e9d43ea9817a3"} Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.237686 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-kn6mg" event={"ID":"8bd7cdf9-9085-4702-8a95-f3f445783066","Type":"ContainerDied","Data":"98355b5ff0f37f5f69085547fb09cf3959e6d3aadb97e4f22ccde4fc9bcbc428"} Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.237774 4791 scope.go:117] "RemoveContainer" containerID="780a43dcc765947bfc4191796a7229566d6781e2f3ad9a95182e9d43ea9817a3" Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.241735 4791 generic.go:334] "Generic (PLEG): container finished" podID="c705ea37-240a-4fd8-9779-98bff52678ca" containerID="9dad24b992be360e45b518706e2c4136d87b2242d2c3a3327176c9ceaae98250" exitCode=0 Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.241801 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7gsq" event={"ID":"c705ea37-240a-4fd8-9779-98bff52678ca","Type":"ContainerDied","Data":"9dad24b992be360e45b518706e2c4136d87b2242d2c3a3327176c9ceaae98250"} Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.241822 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7gsq" Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.241837 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7gsq" event={"ID":"c705ea37-240a-4fd8-9779-98bff52678ca","Type":"ContainerDied","Data":"6eb4c55eaf374e6d37946b209214939753e37410a64dddb7119e1e4d5892e963"} Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.256226 4791 scope.go:117] "RemoveContainer" containerID="780a43dcc765947bfc4191796a7229566d6781e2f3ad9a95182e9d43ea9817a3" Dec 08 21:22:02 crc kubenswrapper[4791]: E1208 21:22:02.257451 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"780a43dcc765947bfc4191796a7229566d6781e2f3ad9a95182e9d43ea9817a3\": container with ID starting with 780a43dcc765947bfc4191796a7229566d6781e2f3ad9a95182e9d43ea9817a3 not found: ID does not exist" containerID="780a43dcc765947bfc4191796a7229566d6781e2f3ad9a95182e9d43ea9817a3" Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.257492 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"780a43dcc765947bfc4191796a7229566d6781e2f3ad9a95182e9d43ea9817a3"} err="failed to get container status \"780a43dcc765947bfc4191796a7229566d6781e2f3ad9a95182e9d43ea9817a3\": rpc error: code = NotFound desc = could not find container \"780a43dcc765947bfc4191796a7229566d6781e2f3ad9a95182e9d43ea9817a3\": container with ID starting with 780a43dcc765947bfc4191796a7229566d6781e2f3ad9a95182e9d43ea9817a3 not found: ID does not exist" Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.257525 4791 scope.go:117] "RemoveContainer" containerID="9dad24b992be360e45b518706e2c4136d87b2242d2c3a3327176c9ceaae98250" Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.280391 4791 scope.go:117] "RemoveContainer" containerID="9dad24b992be360e45b518706e2c4136d87b2242d2c3a3327176c9ceaae98250" Dec 08 21:22:02 crc kubenswrapper[4791]: E1208 21:22:02.281536 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9dad24b992be360e45b518706e2c4136d87b2242d2c3a3327176c9ceaae98250\": container with ID starting with 9dad24b992be360e45b518706e2c4136d87b2242d2c3a3327176c9ceaae98250 not found: ID does not exist" containerID="9dad24b992be360e45b518706e2c4136d87b2242d2c3a3327176c9ceaae98250" Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.281604 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9dad24b992be360e45b518706e2c4136d87b2242d2c3a3327176c9ceaae98250"} err="failed to get container status \"9dad24b992be360e45b518706e2c4136d87b2242d2c3a3327176c9ceaae98250\": rpc error: code = NotFound desc = could not find container \"9dad24b992be360e45b518706e2c4136d87b2242d2c3a3327176c9ceaae98250\": container with ID starting with 9dad24b992be360e45b518706e2c4136d87b2242d2c3a3327176c9ceaae98250 not found: ID does not exist" Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.290151 4791 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.311432 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8bd7cdf9-9085-4702-8a95-f3f445783066-client-ca\") pod 
\"8bd7cdf9-9085-4702-8a95-f3f445783066\" (UID: \"8bd7cdf9-9085-4702-8a95-f3f445783066\") " Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.311511 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c705ea37-240a-4fd8-9779-98bff52678ca-serving-cert\") pod \"c705ea37-240a-4fd8-9779-98bff52678ca\" (UID: \"c705ea37-240a-4fd8-9779-98bff52678ca\") " Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.311548 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xx9r5\" (UniqueName: \"kubernetes.io/projected/8bd7cdf9-9085-4702-8a95-f3f445783066-kube-api-access-xx9r5\") pod \"8bd7cdf9-9085-4702-8a95-f3f445783066\" (UID: \"8bd7cdf9-9085-4702-8a95-f3f445783066\") " Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.311589 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8bd7cdf9-9085-4702-8a95-f3f445783066-proxy-ca-bundles\") pod \"8bd7cdf9-9085-4702-8a95-f3f445783066\" (UID: \"8bd7cdf9-9085-4702-8a95-f3f445783066\") " Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.311641 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8bd7cdf9-9085-4702-8a95-f3f445783066-serving-cert\") pod \"8bd7cdf9-9085-4702-8a95-f3f445783066\" (UID: \"8bd7cdf9-9085-4702-8a95-f3f445783066\") " Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.311670 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c705ea37-240a-4fd8-9779-98bff52678ca-client-ca\") pod \"c705ea37-240a-4fd8-9779-98bff52678ca\" (UID: \"c705ea37-240a-4fd8-9779-98bff52678ca\") " Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.311730 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dfnkh\" (UniqueName: \"kubernetes.io/projected/c705ea37-240a-4fd8-9779-98bff52678ca-kube-api-access-dfnkh\") pod \"c705ea37-240a-4fd8-9779-98bff52678ca\" (UID: \"c705ea37-240a-4fd8-9779-98bff52678ca\") " Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.311784 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8bd7cdf9-9085-4702-8a95-f3f445783066-config\") pod \"8bd7cdf9-9085-4702-8a95-f3f445783066\" (UID: \"8bd7cdf9-9085-4702-8a95-f3f445783066\") " Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.311809 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c705ea37-240a-4fd8-9779-98bff52678ca-config\") pod \"c705ea37-240a-4fd8-9779-98bff52678ca\" (UID: \"c705ea37-240a-4fd8-9779-98bff52678ca\") " Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.313080 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8bd7cdf9-9085-4702-8a95-f3f445783066-client-ca" (OuterVolumeSpecName: "client-ca") pod "8bd7cdf9-9085-4702-8a95-f3f445783066" (UID: "8bd7cdf9-9085-4702-8a95-f3f445783066"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.313162 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c705ea37-240a-4fd8-9779-98bff52678ca-client-ca" (OuterVolumeSpecName: "client-ca") pod "c705ea37-240a-4fd8-9779-98bff52678ca" (UID: "c705ea37-240a-4fd8-9779-98bff52678ca"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.313253 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8bd7cdf9-9085-4702-8a95-f3f445783066-config" (OuterVolumeSpecName: "config") pod "8bd7cdf9-9085-4702-8a95-f3f445783066" (UID: "8bd7cdf9-9085-4702-8a95-f3f445783066"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.313331 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c705ea37-240a-4fd8-9779-98bff52678ca-config" (OuterVolumeSpecName: "config") pod "c705ea37-240a-4fd8-9779-98bff52678ca" (UID: "c705ea37-240a-4fd8-9779-98bff52678ca"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.313654 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8bd7cdf9-9085-4702-8a95-f3f445783066-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "8bd7cdf9-9085-4702-8a95-f3f445783066" (UID: "8bd7cdf9-9085-4702-8a95-f3f445783066"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.318965 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8bd7cdf9-9085-4702-8a95-f3f445783066-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8bd7cdf9-9085-4702-8a95-f3f445783066" (UID: "8bd7cdf9-9085-4702-8a95-f3f445783066"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.319134 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8bd7cdf9-9085-4702-8a95-f3f445783066-kube-api-access-xx9r5" (OuterVolumeSpecName: "kube-api-access-xx9r5") pod "8bd7cdf9-9085-4702-8a95-f3f445783066" (UID: "8bd7cdf9-9085-4702-8a95-f3f445783066"). InnerVolumeSpecName "kube-api-access-xx9r5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.320304 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c705ea37-240a-4fd8-9779-98bff52678ca-kube-api-access-dfnkh" (OuterVolumeSpecName: "kube-api-access-dfnkh") pod "c705ea37-240a-4fd8-9779-98bff52678ca" (UID: "c705ea37-240a-4fd8-9779-98bff52678ca"). InnerVolumeSpecName "kube-api-access-dfnkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.321108 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c705ea37-240a-4fd8-9779-98bff52678ca-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "c705ea37-240a-4fd8-9779-98bff52678ca" (UID: "c705ea37-240a-4fd8-9779-98bff52678ca"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.413529 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c705ea37-240a-4fd8-9779-98bff52678ca-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.413564 4791 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8bd7cdf9-9085-4702-8a95-f3f445783066-client-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.413576 4791 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c705ea37-240a-4fd8-9779-98bff52678ca-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.413587 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xx9r5\" (UniqueName: \"kubernetes.io/projected/8bd7cdf9-9085-4702-8a95-f3f445783066-kube-api-access-xx9r5\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.413600 4791 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8bd7cdf9-9085-4702-8a95-f3f445783066-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.413609 4791 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8bd7cdf9-9085-4702-8a95-f3f445783066-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.413617 4791 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c705ea37-240a-4fd8-9779-98bff52678ca-client-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.413627 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dfnkh\" (UniqueName: \"kubernetes.io/projected/c705ea37-240a-4fd8-9779-98bff52678ca-kube-api-access-dfnkh\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.413636 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8bd7cdf9-9085-4702-8a95-f3f445783066-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.570760 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-kn6mg"] Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.574338 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-kn6mg"] Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.591952 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7gsq"] Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.597299 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7gsq"] Dec 08 21:22:02 crc kubenswrapper[4791]: I1208 21:22:02.959301 4791 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.511276 4791 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-controller-manager/controller-manager-84db4478f9-cjpzg"] Dec 08 21:22:03 crc kubenswrapper[4791]: E1208 21:22:03.511528 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bd7cdf9-9085-4702-8a95-f3f445783066" containerName="controller-manager" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.511543 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bd7cdf9-9085-4702-8a95-f3f445783066" containerName="controller-manager" Dec 08 21:22:03 crc kubenswrapper[4791]: E1208 21:22:03.511554 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.511561 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 08 21:22:03 crc kubenswrapper[4791]: E1208 21:22:03.511586 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c705ea37-240a-4fd8-9779-98bff52678ca" containerName="route-controller-manager" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.511595 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="c705ea37-240a-4fd8-9779-98bff52678ca" containerName="route-controller-manager" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.511727 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="c705ea37-240a-4fd8-9779-98bff52678ca" containerName="route-controller-manager" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.511742 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bd7cdf9-9085-4702-8a95-f3f445783066" containerName="controller-manager" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.511761 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.512193 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-84db4478f9-cjpzg" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.514815 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-74d58dcfb9-hgchh"] Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.515492 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-74d58dcfb9-hgchh" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.518196 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.518254 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.518312 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.518625 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.518828 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.518852 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.518877 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.519091 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.522668 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.522793 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.522983 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.525469 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.528302 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-84db4478f9-cjpzg"] Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.529292 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.533460 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-74d58dcfb9-hgchh"] Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.603576 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8bd7cdf9-9085-4702-8a95-f3f445783066" path="/var/lib/kubelet/pods/8bd7cdf9-9085-4702-8a95-f3f445783066/volumes" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.604422 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c705ea37-240a-4fd8-9779-98bff52678ca" path="/var/lib/kubelet/pods/c705ea37-240a-4fd8-9779-98bff52678ca/volumes" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.628973 4791 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c589d505-5f57-46df-8edc-eddf9a3a0faa-proxy-ca-bundles\") pod \"controller-manager-84db4478f9-cjpzg\" (UID: \"c589d505-5f57-46df-8edc-eddf9a3a0faa\") " pod="openshift-controller-manager/controller-manager-84db4478f9-cjpzg" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.629033 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9033a4c9-a62e-4a62-9048-32a86a0a19c8-config\") pod \"route-controller-manager-74d58dcfb9-hgchh\" (UID: \"9033a4c9-a62e-4a62-9048-32a86a0a19c8\") " pod="openshift-route-controller-manager/route-controller-manager-74d58dcfb9-hgchh" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.629058 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dpc5h\" (UniqueName: \"kubernetes.io/projected/c589d505-5f57-46df-8edc-eddf9a3a0faa-kube-api-access-dpc5h\") pod \"controller-manager-84db4478f9-cjpzg\" (UID: \"c589d505-5f57-46df-8edc-eddf9a3a0faa\") " pod="openshift-controller-manager/controller-manager-84db4478f9-cjpzg" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.629079 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c589d505-5f57-46df-8edc-eddf9a3a0faa-config\") pod \"controller-manager-84db4478f9-cjpzg\" (UID: \"c589d505-5f57-46df-8edc-eddf9a3a0faa\") " pod="openshift-controller-manager/controller-manager-84db4478f9-cjpzg" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.629099 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9033a4c9-a62e-4a62-9048-32a86a0a19c8-serving-cert\") pod \"route-controller-manager-74d58dcfb9-hgchh\" (UID: \"9033a4c9-a62e-4a62-9048-32a86a0a19c8\") " pod="openshift-route-controller-manager/route-controller-manager-74d58dcfb9-hgchh" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.629117 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l8d69\" (UniqueName: \"kubernetes.io/projected/9033a4c9-a62e-4a62-9048-32a86a0a19c8-kube-api-access-l8d69\") pod \"route-controller-manager-74d58dcfb9-hgchh\" (UID: \"9033a4c9-a62e-4a62-9048-32a86a0a19c8\") " pod="openshift-route-controller-manager/route-controller-manager-74d58dcfb9-hgchh" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.629192 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c589d505-5f57-46df-8edc-eddf9a3a0faa-serving-cert\") pod \"controller-manager-84db4478f9-cjpzg\" (UID: \"c589d505-5f57-46df-8edc-eddf9a3a0faa\") " pod="openshift-controller-manager/controller-manager-84db4478f9-cjpzg" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.629259 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9033a4c9-a62e-4a62-9048-32a86a0a19c8-client-ca\") pod \"route-controller-manager-74d58dcfb9-hgchh\" (UID: \"9033a4c9-a62e-4a62-9048-32a86a0a19c8\") " pod="openshift-route-controller-manager/route-controller-manager-74d58dcfb9-hgchh" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.629433 4791 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c589d505-5f57-46df-8edc-eddf9a3a0faa-client-ca\") pod \"controller-manager-84db4478f9-cjpzg\" (UID: \"c589d505-5f57-46df-8edc-eddf9a3a0faa\") " pod="openshift-controller-manager/controller-manager-84db4478f9-cjpzg" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.730471 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9033a4c9-a62e-4a62-9048-32a86a0a19c8-config\") pod \"route-controller-manager-74d58dcfb9-hgchh\" (UID: \"9033a4c9-a62e-4a62-9048-32a86a0a19c8\") " pod="openshift-route-controller-manager/route-controller-manager-74d58dcfb9-hgchh" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.730811 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dpc5h\" (UniqueName: \"kubernetes.io/projected/c589d505-5f57-46df-8edc-eddf9a3a0faa-kube-api-access-dpc5h\") pod \"controller-manager-84db4478f9-cjpzg\" (UID: \"c589d505-5f57-46df-8edc-eddf9a3a0faa\") " pod="openshift-controller-manager/controller-manager-84db4478f9-cjpzg" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.730942 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c589d505-5f57-46df-8edc-eddf9a3a0faa-config\") pod \"controller-manager-84db4478f9-cjpzg\" (UID: \"c589d505-5f57-46df-8edc-eddf9a3a0faa\") " pod="openshift-controller-manager/controller-manager-84db4478f9-cjpzg" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.731056 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9033a4c9-a62e-4a62-9048-32a86a0a19c8-serving-cert\") pod \"route-controller-manager-74d58dcfb9-hgchh\" (UID: \"9033a4c9-a62e-4a62-9048-32a86a0a19c8\") " pod="openshift-route-controller-manager/route-controller-manager-74d58dcfb9-hgchh" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.731156 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9033a4c9-a62e-4a62-9048-32a86a0a19c8-client-ca\") pod \"route-controller-manager-74d58dcfb9-hgchh\" (UID: \"9033a4c9-a62e-4a62-9048-32a86a0a19c8\") " pod="openshift-route-controller-manager/route-controller-manager-74d58dcfb9-hgchh" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.731253 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l8d69\" (UniqueName: \"kubernetes.io/projected/9033a4c9-a62e-4a62-9048-32a86a0a19c8-kube-api-access-l8d69\") pod \"route-controller-manager-74d58dcfb9-hgchh\" (UID: \"9033a4c9-a62e-4a62-9048-32a86a0a19c8\") " pod="openshift-route-controller-manager/route-controller-manager-74d58dcfb9-hgchh" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.731369 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c589d505-5f57-46df-8edc-eddf9a3a0faa-serving-cert\") pod \"controller-manager-84db4478f9-cjpzg\" (UID: \"c589d505-5f57-46df-8edc-eddf9a3a0faa\") " pod="openshift-controller-manager/controller-manager-84db4478f9-cjpzg" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.732175 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/c589d505-5f57-46df-8edc-eddf9a3a0faa-client-ca\") pod \"controller-manager-84db4478f9-cjpzg\" (UID: \"c589d505-5f57-46df-8edc-eddf9a3a0faa\") " pod="openshift-controller-manager/controller-manager-84db4478f9-cjpzg" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.732304 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c589d505-5f57-46df-8edc-eddf9a3a0faa-proxy-ca-bundles\") pod \"controller-manager-84db4478f9-cjpzg\" (UID: \"c589d505-5f57-46df-8edc-eddf9a3a0faa\") " pod="openshift-controller-manager/controller-manager-84db4478f9-cjpzg" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.732466 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9033a4c9-a62e-4a62-9048-32a86a0a19c8-client-ca\") pod \"route-controller-manager-74d58dcfb9-hgchh\" (UID: \"9033a4c9-a62e-4a62-9048-32a86a0a19c8\") " pod="openshift-route-controller-manager/route-controller-manager-74d58dcfb9-hgchh" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.732864 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c589d505-5f57-46df-8edc-eddf9a3a0faa-config\") pod \"controller-manager-84db4478f9-cjpzg\" (UID: \"c589d505-5f57-46df-8edc-eddf9a3a0faa\") " pod="openshift-controller-manager/controller-manager-84db4478f9-cjpzg" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.733401 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c589d505-5f57-46df-8edc-eddf9a3a0faa-client-ca\") pod \"controller-manager-84db4478f9-cjpzg\" (UID: \"c589d505-5f57-46df-8edc-eddf9a3a0faa\") " pod="openshift-controller-manager/controller-manager-84db4478f9-cjpzg" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.734612 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c589d505-5f57-46df-8edc-eddf9a3a0faa-proxy-ca-bundles\") pod \"controller-manager-84db4478f9-cjpzg\" (UID: \"c589d505-5f57-46df-8edc-eddf9a3a0faa\") " pod="openshift-controller-manager/controller-manager-84db4478f9-cjpzg" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.736186 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c589d505-5f57-46df-8edc-eddf9a3a0faa-serving-cert\") pod \"controller-manager-84db4478f9-cjpzg\" (UID: \"c589d505-5f57-46df-8edc-eddf9a3a0faa\") " pod="openshift-controller-manager/controller-manager-84db4478f9-cjpzg" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.736719 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9033a4c9-a62e-4a62-9048-32a86a0a19c8-serving-cert\") pod \"route-controller-manager-74d58dcfb9-hgchh\" (UID: \"9033a4c9-a62e-4a62-9048-32a86a0a19c8\") " pod="openshift-route-controller-manager/route-controller-manager-74d58dcfb9-hgchh" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.742311 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9033a4c9-a62e-4a62-9048-32a86a0a19c8-config\") pod \"route-controller-manager-74d58dcfb9-hgchh\" (UID: \"9033a4c9-a62e-4a62-9048-32a86a0a19c8\") " 
pod="openshift-route-controller-manager/route-controller-manager-74d58dcfb9-hgchh" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.748087 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l8d69\" (UniqueName: \"kubernetes.io/projected/9033a4c9-a62e-4a62-9048-32a86a0a19c8-kube-api-access-l8d69\") pod \"route-controller-manager-74d58dcfb9-hgchh\" (UID: \"9033a4c9-a62e-4a62-9048-32a86a0a19c8\") " pod="openshift-route-controller-manager/route-controller-manager-74d58dcfb9-hgchh" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.748654 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dpc5h\" (UniqueName: \"kubernetes.io/projected/c589d505-5f57-46df-8edc-eddf9a3a0faa-kube-api-access-dpc5h\") pod \"controller-manager-84db4478f9-cjpzg\" (UID: \"c589d505-5f57-46df-8edc-eddf9a3a0faa\") " pod="openshift-controller-manager/controller-manager-84db4478f9-cjpzg" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.838589 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-84db4478f9-cjpzg" Dec 08 21:22:03 crc kubenswrapper[4791]: I1208 21:22:03.847967 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-74d58dcfb9-hgchh" Dec 08 21:22:04 crc kubenswrapper[4791]: I1208 21:22:04.008853 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-84db4478f9-cjpzg"] Dec 08 21:22:04 crc kubenswrapper[4791]: W1208 21:22:04.022475 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc589d505_5f57_46df_8edc_eddf9a3a0faa.slice/crio-02cd42ad533809e3d4327dc641e9e25663fb59bc82032f06bf253f3d5a8a7a14 WatchSource:0}: Error finding container 02cd42ad533809e3d4327dc641e9e25663fb59bc82032f06bf253f3d5a8a7a14: Status 404 returned error can't find the container with id 02cd42ad533809e3d4327dc641e9e25663fb59bc82032f06bf253f3d5a8a7a14 Dec 08 21:22:04 crc kubenswrapper[4791]: I1208 21:22:04.072462 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-74d58dcfb9-hgchh"] Dec 08 21:22:04 crc kubenswrapper[4791]: I1208 21:22:04.254611 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-84db4478f9-cjpzg" event={"ID":"c589d505-5f57-46df-8edc-eddf9a3a0faa","Type":"ContainerStarted","Data":"02cd42ad533809e3d4327dc641e9e25663fb59bc82032f06bf253f3d5a8a7a14"} Dec 08 21:22:05 crc kubenswrapper[4791]: I1208 21:22:05.252342 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:22:05 crc kubenswrapper[4791]: I1208 21:22:05.252862 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:22:05 crc kubenswrapper[4791]: I1208 21:22:05.252933 4791 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" 
pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" Dec 08 21:22:05 crc kubenswrapper[4791]: I1208 21:22:05.253914 4791 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7c1cbce001d702f40825e1e2c6c77ce54111ab2097acbc1cc3d365fd3d7b06cc"} pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 08 21:22:05 crc kubenswrapper[4791]: I1208 21:22:05.254327 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" containerID="cri-o://7c1cbce001d702f40825e1e2c6c77ce54111ab2097acbc1cc3d365fd3d7b06cc" gracePeriod=600 Dec 08 21:22:05 crc kubenswrapper[4791]: I1208 21:22:05.263377 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-84db4478f9-cjpzg" event={"ID":"c589d505-5f57-46df-8edc-eddf9a3a0faa","Type":"ContainerStarted","Data":"17f29d1e99d44cacedc1d2c5264a2c1bb09a2784b296d78f3ae247a3b32fdf4b"} Dec 08 21:22:05 crc kubenswrapper[4791]: I1208 21:22:05.263854 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-84db4478f9-cjpzg" Dec 08 21:22:05 crc kubenswrapper[4791]: I1208 21:22:05.267249 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-74d58dcfb9-hgchh" event={"ID":"9033a4c9-a62e-4a62-9048-32a86a0a19c8","Type":"ContainerStarted","Data":"e9f77707ed7bb81439a928b1b6203b3afc525269b4d2118e527e534bfbe18972"} Dec 08 21:22:05 crc kubenswrapper[4791]: I1208 21:22:05.267925 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-74d58dcfb9-hgchh" event={"ID":"9033a4c9-a62e-4a62-9048-32a86a0a19c8","Type":"ContainerStarted","Data":"c971228c364e3724bced533f163d7ff2a2f8f578fb6247ac77ca7b8cf796163e"} Dec 08 21:22:05 crc kubenswrapper[4791]: I1208 21:22:05.267981 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-74d58dcfb9-hgchh" Dec 08 21:22:05 crc kubenswrapper[4791]: I1208 21:22:05.269882 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-84db4478f9-cjpzg" Dec 08 21:22:05 crc kubenswrapper[4791]: I1208 21:22:05.275559 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-74d58dcfb9-hgchh" Dec 08 21:22:05 crc kubenswrapper[4791]: I1208 21:22:05.291950 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-84db4478f9-cjpzg" podStartSLOduration=4.291927303 podStartE2EDuration="4.291927303s" podCreationTimestamp="2025-12-08 21:22:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:22:05.289871351 +0000 UTC m=+201.988629686" watchObservedRunningTime="2025-12-08 21:22:05.291927303 +0000 UTC m=+201.990685648" Dec 08 21:22:06 crc kubenswrapper[4791]: I1208 21:22:06.275091 4791 generic.go:334] "Generic (PLEG): container finished" podID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" 
containerID="7c1cbce001d702f40825e1e2c6c77ce54111ab2097acbc1cc3d365fd3d7b06cc" exitCode=0 Dec 08 21:22:06 crc kubenswrapper[4791]: I1208 21:22:06.275178 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" event={"ID":"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d","Type":"ContainerDied","Data":"7c1cbce001d702f40825e1e2c6c77ce54111ab2097acbc1cc3d365fd3d7b06cc"} Dec 08 21:22:06 crc kubenswrapper[4791]: I1208 21:22:06.275492 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" event={"ID":"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d","Type":"ContainerStarted","Data":"f02e0448eb4a9f07a94c3f82c8906b8b6abe63c2035df6075b84928e6646429d"} Dec 08 21:22:06 crc kubenswrapper[4791]: I1208 21:22:06.294305 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-74d58dcfb9-hgchh" podStartSLOduration=5.294289922 podStartE2EDuration="5.294289922s" podCreationTimestamp="2025-12-08 21:22:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:22:05.329686157 +0000 UTC m=+202.028444522" watchObservedRunningTime="2025-12-08 21:22:06.294289922 +0000 UTC m=+202.993048257" Dec 08 21:22:09 crc kubenswrapper[4791]: I1208 21:22:09.723025 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-84db4478f9-cjpzg"] Dec 08 21:22:09 crc kubenswrapper[4791]: I1208 21:22:09.723852 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-84db4478f9-cjpzg" podUID="c589d505-5f57-46df-8edc-eddf9a3a0faa" containerName="controller-manager" containerID="cri-o://17f29d1e99d44cacedc1d2c5264a2c1bb09a2784b296d78f3ae247a3b32fdf4b" gracePeriod=30 Dec 08 21:22:09 crc kubenswrapper[4791]: I1208 21:22:09.744651 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-74d58dcfb9-hgchh"] Dec 08 21:22:09 crc kubenswrapper[4791]: I1208 21:22:09.744859 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-74d58dcfb9-hgchh" podUID="9033a4c9-a62e-4a62-9048-32a86a0a19c8" containerName="route-controller-manager" containerID="cri-o://e9f77707ed7bb81439a928b1b6203b3afc525269b4d2118e527e534bfbe18972" gracePeriod=30 Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.219362 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-74d58dcfb9-hgchh" Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.282020 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-84db4478f9-cjpzg" Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.299197 4791 generic.go:334] "Generic (PLEG): container finished" podID="9033a4c9-a62e-4a62-9048-32a86a0a19c8" containerID="e9f77707ed7bb81439a928b1b6203b3afc525269b4d2118e527e534bfbe18972" exitCode=0 Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.299286 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-74d58dcfb9-hgchh" event={"ID":"9033a4c9-a62e-4a62-9048-32a86a0a19c8","Type":"ContainerDied","Data":"e9f77707ed7bb81439a928b1b6203b3afc525269b4d2118e527e534bfbe18972"} Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.299323 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-74d58dcfb9-hgchh" event={"ID":"9033a4c9-a62e-4a62-9048-32a86a0a19c8","Type":"ContainerDied","Data":"c971228c364e3724bced533f163d7ff2a2f8f578fb6247ac77ca7b8cf796163e"} Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.299346 4791 scope.go:117] "RemoveContainer" containerID="e9f77707ed7bb81439a928b1b6203b3afc525269b4d2118e527e534bfbe18972" Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.299498 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-74d58dcfb9-hgchh" Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.305878 4791 generic.go:334] "Generic (PLEG): container finished" podID="c589d505-5f57-46df-8edc-eddf9a3a0faa" containerID="17f29d1e99d44cacedc1d2c5264a2c1bb09a2784b296d78f3ae247a3b32fdf4b" exitCode=0 Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.305931 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-84db4478f9-cjpzg" event={"ID":"c589d505-5f57-46df-8edc-eddf9a3a0faa","Type":"ContainerDied","Data":"17f29d1e99d44cacedc1d2c5264a2c1bb09a2784b296d78f3ae247a3b32fdf4b"} Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.305964 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-84db4478f9-cjpzg" event={"ID":"c589d505-5f57-46df-8edc-eddf9a3a0faa","Type":"ContainerDied","Data":"02cd42ad533809e3d4327dc641e9e25663fb59bc82032f06bf253f3d5a8a7a14"} Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.306039 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-84db4478f9-cjpzg" Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.320217 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l8d69\" (UniqueName: \"kubernetes.io/projected/9033a4c9-a62e-4a62-9048-32a86a0a19c8-kube-api-access-l8d69\") pod \"9033a4c9-a62e-4a62-9048-32a86a0a19c8\" (UID: \"9033a4c9-a62e-4a62-9048-32a86a0a19c8\") " Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.320285 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9033a4c9-a62e-4a62-9048-32a86a0a19c8-client-ca\") pod \"9033a4c9-a62e-4a62-9048-32a86a0a19c8\" (UID: \"9033a4c9-a62e-4a62-9048-32a86a0a19c8\") " Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.320326 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9033a4c9-a62e-4a62-9048-32a86a0a19c8-config\") pod \"9033a4c9-a62e-4a62-9048-32a86a0a19c8\" (UID: \"9033a4c9-a62e-4a62-9048-32a86a0a19c8\") " Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.320394 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9033a4c9-a62e-4a62-9048-32a86a0a19c8-serving-cert\") pod \"9033a4c9-a62e-4a62-9048-32a86a0a19c8\" (UID: \"9033a4c9-a62e-4a62-9048-32a86a0a19c8\") " Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.321889 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9033a4c9-a62e-4a62-9048-32a86a0a19c8-config" (OuterVolumeSpecName: "config") pod "9033a4c9-a62e-4a62-9048-32a86a0a19c8" (UID: "9033a4c9-a62e-4a62-9048-32a86a0a19c8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.322559 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9033a4c9-a62e-4a62-9048-32a86a0a19c8-client-ca" (OuterVolumeSpecName: "client-ca") pod "9033a4c9-a62e-4a62-9048-32a86a0a19c8" (UID: "9033a4c9-a62e-4a62-9048-32a86a0a19c8"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.329025 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9033a4c9-a62e-4a62-9048-32a86a0a19c8-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9033a4c9-a62e-4a62-9048-32a86a0a19c8" (UID: "9033a4c9-a62e-4a62-9048-32a86a0a19c8"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.329078 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9033a4c9-a62e-4a62-9048-32a86a0a19c8-kube-api-access-l8d69" (OuterVolumeSpecName: "kube-api-access-l8d69") pod "9033a4c9-a62e-4a62-9048-32a86a0a19c8" (UID: "9033a4c9-a62e-4a62-9048-32a86a0a19c8"). InnerVolumeSpecName "kube-api-access-l8d69". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.329453 4791 scope.go:117] "RemoveContainer" containerID="e9f77707ed7bb81439a928b1b6203b3afc525269b4d2118e527e534bfbe18972" Dec 08 21:22:10 crc kubenswrapper[4791]: E1208 21:22:10.329994 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e9f77707ed7bb81439a928b1b6203b3afc525269b4d2118e527e534bfbe18972\": container with ID starting with e9f77707ed7bb81439a928b1b6203b3afc525269b4d2118e527e534bfbe18972 not found: ID does not exist" containerID="e9f77707ed7bb81439a928b1b6203b3afc525269b4d2118e527e534bfbe18972" Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.330046 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9f77707ed7bb81439a928b1b6203b3afc525269b4d2118e527e534bfbe18972"} err="failed to get container status \"e9f77707ed7bb81439a928b1b6203b3afc525269b4d2118e527e534bfbe18972\": rpc error: code = NotFound desc = could not find container \"e9f77707ed7bb81439a928b1b6203b3afc525269b4d2118e527e534bfbe18972\": container with ID starting with e9f77707ed7bb81439a928b1b6203b3afc525269b4d2118e527e534bfbe18972 not found: ID does not exist" Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.330090 4791 scope.go:117] "RemoveContainer" containerID="17f29d1e99d44cacedc1d2c5264a2c1bb09a2784b296d78f3ae247a3b32fdf4b" Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.351072 4791 scope.go:117] "RemoveContainer" containerID="17f29d1e99d44cacedc1d2c5264a2c1bb09a2784b296d78f3ae247a3b32fdf4b" Dec 08 21:22:10 crc kubenswrapper[4791]: E1208 21:22:10.351789 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"17f29d1e99d44cacedc1d2c5264a2c1bb09a2784b296d78f3ae247a3b32fdf4b\": container with ID starting with 17f29d1e99d44cacedc1d2c5264a2c1bb09a2784b296d78f3ae247a3b32fdf4b not found: ID does not exist" containerID="17f29d1e99d44cacedc1d2c5264a2c1bb09a2784b296d78f3ae247a3b32fdf4b" Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.351848 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"17f29d1e99d44cacedc1d2c5264a2c1bb09a2784b296d78f3ae247a3b32fdf4b"} err="failed to get container status \"17f29d1e99d44cacedc1d2c5264a2c1bb09a2784b296d78f3ae247a3b32fdf4b\": rpc error: code = NotFound desc = could not find container \"17f29d1e99d44cacedc1d2c5264a2c1bb09a2784b296d78f3ae247a3b32fdf4b\": container with ID starting with 17f29d1e99d44cacedc1d2c5264a2c1bb09a2784b296d78f3ae247a3b32fdf4b not found: ID does not exist" Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.421496 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c589d505-5f57-46df-8edc-eddf9a3a0faa-serving-cert\") pod \"c589d505-5f57-46df-8edc-eddf9a3a0faa\" (UID: \"c589d505-5f57-46df-8edc-eddf9a3a0faa\") " Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.421602 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dpc5h\" (UniqueName: \"kubernetes.io/projected/c589d505-5f57-46df-8edc-eddf9a3a0faa-kube-api-access-dpc5h\") pod \"c589d505-5f57-46df-8edc-eddf9a3a0faa\" (UID: \"c589d505-5f57-46df-8edc-eddf9a3a0faa\") " Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.421650 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c589d505-5f57-46df-8edc-eddf9a3a0faa-config\") pod \"c589d505-5f57-46df-8edc-eddf9a3a0faa\" (UID: \"c589d505-5f57-46df-8edc-eddf9a3a0faa\") " Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.421771 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c589d505-5f57-46df-8edc-eddf9a3a0faa-client-ca\") pod \"c589d505-5f57-46df-8edc-eddf9a3a0faa\" (UID: \"c589d505-5f57-46df-8edc-eddf9a3a0faa\") " Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.421796 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c589d505-5f57-46df-8edc-eddf9a3a0faa-proxy-ca-bundles\") pod \"c589d505-5f57-46df-8edc-eddf9a3a0faa\" (UID: \"c589d505-5f57-46df-8edc-eddf9a3a0faa\") " Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.422139 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l8d69\" (UniqueName: \"kubernetes.io/projected/9033a4c9-a62e-4a62-9048-32a86a0a19c8-kube-api-access-l8d69\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.422158 4791 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9033a4c9-a62e-4a62-9048-32a86a0a19c8-client-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.422171 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9033a4c9-a62e-4a62-9048-32a86a0a19c8-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.422182 4791 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9033a4c9-a62e-4a62-9048-32a86a0a19c8-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.423158 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c589d505-5f57-46df-8edc-eddf9a3a0faa-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "c589d505-5f57-46df-8edc-eddf9a3a0faa" (UID: "c589d505-5f57-46df-8edc-eddf9a3a0faa"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.423180 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c589d505-5f57-46df-8edc-eddf9a3a0faa-client-ca" (OuterVolumeSpecName: "client-ca") pod "c589d505-5f57-46df-8edc-eddf9a3a0faa" (UID: "c589d505-5f57-46df-8edc-eddf9a3a0faa"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.423222 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c589d505-5f57-46df-8edc-eddf9a3a0faa-config" (OuterVolumeSpecName: "config") pod "c589d505-5f57-46df-8edc-eddf9a3a0faa" (UID: "c589d505-5f57-46df-8edc-eddf9a3a0faa"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.426192 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c589d505-5f57-46df-8edc-eddf9a3a0faa-kube-api-access-dpc5h" (OuterVolumeSpecName: "kube-api-access-dpc5h") pod "c589d505-5f57-46df-8edc-eddf9a3a0faa" (UID: "c589d505-5f57-46df-8edc-eddf9a3a0faa"). InnerVolumeSpecName "kube-api-access-dpc5h". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.428763 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c589d505-5f57-46df-8edc-eddf9a3a0faa-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "c589d505-5f57-46df-8edc-eddf9a3a0faa" (UID: "c589d505-5f57-46df-8edc-eddf9a3a0faa"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.523671 4791 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c589d505-5f57-46df-8edc-eddf9a3a0faa-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.523743 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dpc5h\" (UniqueName: \"kubernetes.io/projected/c589d505-5f57-46df-8edc-eddf9a3a0faa-kube-api-access-dpc5h\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.523759 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c589d505-5f57-46df-8edc-eddf9a3a0faa-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.523772 4791 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c589d505-5f57-46df-8edc-eddf9a3a0faa-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.523784 4791 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c589d505-5f57-46df-8edc-eddf9a3a0faa-client-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.638837 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-74d58dcfb9-hgchh"] Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.642686 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-74d58dcfb9-hgchh"] Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.647847 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-84db4478f9-cjpzg"] Dec 08 21:22:10 crc kubenswrapper[4791]: I1208 21:22:10.651965 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-84db4478f9-cjpzg"] Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.520877 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6dcc9cf7cd-cr28v"] Dec 08 21:22:11 crc kubenswrapper[4791]: E1208 21:22:11.521278 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c589d505-5f57-46df-8edc-eddf9a3a0faa" containerName="controller-manager" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.521302 4791 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="c589d505-5f57-46df-8edc-eddf9a3a0faa" containerName="controller-manager" Dec 08 21:22:11 crc kubenswrapper[4791]: E1208 21:22:11.521329 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9033a4c9-a62e-4a62-9048-32a86a0a19c8" containerName="route-controller-manager" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.521351 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="9033a4c9-a62e-4a62-9048-32a86a0a19c8" containerName="route-controller-manager" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.521504 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="c589d505-5f57-46df-8edc-eddf9a3a0faa" containerName="controller-manager" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.521523 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="9033a4c9-a62e-4a62-9048-32a86a0a19c8" containerName="route-controller-manager" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.522106 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6dcc9cf7cd-cr28v" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.524048 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.524123 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.524597 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.525055 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.525103 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.527244 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-588d5457d6-f5kjf"] Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.528299 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-588d5457d6-f5kjf" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.533035 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-588d5457d6-f5kjf"] Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.535311 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.535385 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.536305 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.537459 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.537945 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.538176 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/53126634-8c90-4fa2-9d8f-738a08fd8831-client-ca\") pod \"controller-manager-588d5457d6-f5kjf\" (UID: \"53126634-8c90-4fa2-9d8f-738a08fd8831\") " pod="openshift-controller-manager/controller-manager-588d5457d6-f5kjf" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.538239 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3f96561d-3c31-4257-b391-8e02d1ec5a4c-serving-cert\") pod \"route-controller-manager-6dcc9cf7cd-cr28v\" (UID: \"3f96561d-3c31-4257-b391-8e02d1ec5a4c\") " pod="openshift-route-controller-manager/route-controller-manager-6dcc9cf7cd-cr28v" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.538329 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/53126634-8c90-4fa2-9d8f-738a08fd8831-proxy-ca-bundles\") pod \"controller-manager-588d5457d6-f5kjf\" (UID: \"53126634-8c90-4fa2-9d8f-738a08fd8831\") " pod="openshift-controller-manager/controller-manager-588d5457d6-f5kjf" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.538368 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f96561d-3c31-4257-b391-8e02d1ec5a4c-config\") pod \"route-controller-manager-6dcc9cf7cd-cr28v\" (UID: \"3f96561d-3c31-4257-b391-8e02d1ec5a4c\") " pod="openshift-route-controller-manager/route-controller-manager-6dcc9cf7cd-cr28v" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.538415 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/53126634-8c90-4fa2-9d8f-738a08fd8831-config\") pod \"controller-manager-588d5457d6-f5kjf\" (UID: \"53126634-8c90-4fa2-9d8f-738a08fd8831\") " pod="openshift-controller-manager/controller-manager-588d5457d6-f5kjf" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.538448 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3f96561d-3c31-4257-b391-8e02d1ec5a4c-client-ca\") pod \"route-controller-manager-6dcc9cf7cd-cr28v\" (UID: \"3f96561d-3c31-4257-b391-8e02d1ec5a4c\") " pod="openshift-route-controller-manager/route-controller-manager-6dcc9cf7cd-cr28v" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.538477 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/53126634-8c90-4fa2-9d8f-738a08fd8831-serving-cert\") pod \"controller-manager-588d5457d6-f5kjf\" (UID: \"53126634-8c90-4fa2-9d8f-738a08fd8831\") " pod="openshift-controller-manager/controller-manager-588d5457d6-f5kjf" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.538476 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6dcc9cf7cd-cr28v"] Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.538526 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.538531 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8rzfb\" (UniqueName: \"kubernetes.io/projected/53126634-8c90-4fa2-9d8f-738a08fd8831-kube-api-access-8rzfb\") pod \"controller-manager-588d5457d6-f5kjf\" (UID: \"53126634-8c90-4fa2-9d8f-738a08fd8831\") " pod="openshift-controller-manager/controller-manager-588d5457d6-f5kjf" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.538601 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mm86d\" (UniqueName: \"kubernetes.io/projected/3f96561d-3c31-4257-b391-8e02d1ec5a4c-kube-api-access-mm86d\") pod \"route-controller-manager-6dcc9cf7cd-cr28v\" (UID: \"3f96561d-3c31-4257-b391-8e02d1ec5a4c\") " pod="openshift-route-controller-manager/route-controller-manager-6dcc9cf7cd-cr28v" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.542513 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.545119 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.606382 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9033a4c9-a62e-4a62-9048-32a86a0a19c8" path="/var/lib/kubelet/pods/9033a4c9-a62e-4a62-9048-32a86a0a19c8/volumes" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.607003 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c589d505-5f57-46df-8edc-eddf9a3a0faa" path="/var/lib/kubelet/pods/c589d505-5f57-46df-8edc-eddf9a3a0faa/volumes" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.640590 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3f96561d-3c31-4257-b391-8e02d1ec5a4c-serving-cert\") pod \"route-controller-manager-6dcc9cf7cd-cr28v\" (UID: \"3f96561d-3c31-4257-b391-8e02d1ec5a4c\") " pod="openshift-route-controller-manager/route-controller-manager-6dcc9cf7cd-cr28v" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.640755 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/53126634-8c90-4fa2-9d8f-738a08fd8831-proxy-ca-bundles\") pod \"controller-manager-588d5457d6-f5kjf\" (UID: \"53126634-8c90-4fa2-9d8f-738a08fd8831\") " pod="openshift-controller-manager/controller-manager-588d5457d6-f5kjf" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.640806 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f96561d-3c31-4257-b391-8e02d1ec5a4c-config\") pod \"route-controller-manager-6dcc9cf7cd-cr28v\" (UID: \"3f96561d-3c31-4257-b391-8e02d1ec5a4c\") " pod="openshift-route-controller-manager/route-controller-manager-6dcc9cf7cd-cr28v" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.640897 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/53126634-8c90-4fa2-9d8f-738a08fd8831-config\") pod \"controller-manager-588d5457d6-f5kjf\" (UID: \"53126634-8c90-4fa2-9d8f-738a08fd8831\") " pod="openshift-controller-manager/controller-manager-588d5457d6-f5kjf" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.640938 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3f96561d-3c31-4257-b391-8e02d1ec5a4c-client-ca\") pod \"route-controller-manager-6dcc9cf7cd-cr28v\" (UID: \"3f96561d-3c31-4257-b391-8e02d1ec5a4c\") " pod="openshift-route-controller-manager/route-controller-manager-6dcc9cf7cd-cr28v" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.641008 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/53126634-8c90-4fa2-9d8f-738a08fd8831-serving-cert\") pod \"controller-manager-588d5457d6-f5kjf\" (UID: \"53126634-8c90-4fa2-9d8f-738a08fd8831\") " pod="openshift-controller-manager/controller-manager-588d5457d6-f5kjf" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.641106 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8rzfb\" (UniqueName: \"kubernetes.io/projected/53126634-8c90-4fa2-9d8f-738a08fd8831-kube-api-access-8rzfb\") pod \"controller-manager-588d5457d6-f5kjf\" (UID: \"53126634-8c90-4fa2-9d8f-738a08fd8831\") " pod="openshift-controller-manager/controller-manager-588d5457d6-f5kjf" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.641212 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mm86d\" (UniqueName: \"kubernetes.io/projected/3f96561d-3c31-4257-b391-8e02d1ec5a4c-kube-api-access-mm86d\") pod \"route-controller-manager-6dcc9cf7cd-cr28v\" (UID: \"3f96561d-3c31-4257-b391-8e02d1ec5a4c\") " pod="openshift-route-controller-manager/route-controller-manager-6dcc9cf7cd-cr28v" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.641337 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/53126634-8c90-4fa2-9d8f-738a08fd8831-client-ca\") pod \"controller-manager-588d5457d6-f5kjf\" (UID: \"53126634-8c90-4fa2-9d8f-738a08fd8831\") " pod="openshift-controller-manager/controller-manager-588d5457d6-f5kjf" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.642901 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3f96561d-3c31-4257-b391-8e02d1ec5a4c-client-ca\") pod \"route-controller-manager-6dcc9cf7cd-cr28v\" (UID: 
\"3f96561d-3c31-4257-b391-8e02d1ec5a4c\") " pod="openshift-route-controller-manager/route-controller-manager-6dcc9cf7cd-cr28v" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.643096 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f96561d-3c31-4257-b391-8e02d1ec5a4c-config\") pod \"route-controller-manager-6dcc9cf7cd-cr28v\" (UID: \"3f96561d-3c31-4257-b391-8e02d1ec5a4c\") " pod="openshift-route-controller-manager/route-controller-manager-6dcc9cf7cd-cr28v" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.643321 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/53126634-8c90-4fa2-9d8f-738a08fd8831-client-ca\") pod \"controller-manager-588d5457d6-f5kjf\" (UID: \"53126634-8c90-4fa2-9d8f-738a08fd8831\") " pod="openshift-controller-manager/controller-manager-588d5457d6-f5kjf" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.643638 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/53126634-8c90-4fa2-9d8f-738a08fd8831-config\") pod \"controller-manager-588d5457d6-f5kjf\" (UID: \"53126634-8c90-4fa2-9d8f-738a08fd8831\") " pod="openshift-controller-manager/controller-manager-588d5457d6-f5kjf" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.644238 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/53126634-8c90-4fa2-9d8f-738a08fd8831-proxy-ca-bundles\") pod \"controller-manager-588d5457d6-f5kjf\" (UID: \"53126634-8c90-4fa2-9d8f-738a08fd8831\") " pod="openshift-controller-manager/controller-manager-588d5457d6-f5kjf" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.648876 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/53126634-8c90-4fa2-9d8f-738a08fd8831-serving-cert\") pod \"controller-manager-588d5457d6-f5kjf\" (UID: \"53126634-8c90-4fa2-9d8f-738a08fd8831\") " pod="openshift-controller-manager/controller-manager-588d5457d6-f5kjf" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.658269 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3f96561d-3c31-4257-b391-8e02d1ec5a4c-serving-cert\") pod \"route-controller-manager-6dcc9cf7cd-cr28v\" (UID: \"3f96561d-3c31-4257-b391-8e02d1ec5a4c\") " pod="openshift-route-controller-manager/route-controller-manager-6dcc9cf7cd-cr28v" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.665161 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8rzfb\" (UniqueName: \"kubernetes.io/projected/53126634-8c90-4fa2-9d8f-738a08fd8831-kube-api-access-8rzfb\") pod \"controller-manager-588d5457d6-f5kjf\" (UID: \"53126634-8c90-4fa2-9d8f-738a08fd8831\") " pod="openshift-controller-manager/controller-manager-588d5457d6-f5kjf" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.665392 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mm86d\" (UniqueName: \"kubernetes.io/projected/3f96561d-3c31-4257-b391-8e02d1ec5a4c-kube-api-access-mm86d\") pod \"route-controller-manager-6dcc9cf7cd-cr28v\" (UID: \"3f96561d-3c31-4257-b391-8e02d1ec5a4c\") " pod="openshift-route-controller-manager/route-controller-manager-6dcc9cf7cd-cr28v" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.850405 4791 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6dcc9cf7cd-cr28v" Dec 08 21:22:11 crc kubenswrapper[4791]: I1208 21:22:11.871173 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-588d5457d6-f5kjf" Dec 08 21:22:12 crc kubenswrapper[4791]: I1208 21:22:12.126909 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-588d5457d6-f5kjf"] Dec 08 21:22:12 crc kubenswrapper[4791]: W1208 21:22:12.144054 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod53126634_8c90_4fa2_9d8f_738a08fd8831.slice/crio-3f1a9474cd96b1666c447d58cc32f35d95b5acb0299e42774f847a3948a72b8a WatchSource:0}: Error finding container 3f1a9474cd96b1666c447d58cc32f35d95b5acb0299e42774f847a3948a72b8a: Status 404 returned error can't find the container with id 3f1a9474cd96b1666c447d58cc32f35d95b5acb0299e42774f847a3948a72b8a Dec 08 21:22:12 crc kubenswrapper[4791]: I1208 21:22:12.153585 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6dcc9cf7cd-cr28v"] Dec 08 21:22:12 crc kubenswrapper[4791]: I1208 21:22:12.320998 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6dcc9cf7cd-cr28v" event={"ID":"3f96561d-3c31-4257-b391-8e02d1ec5a4c","Type":"ContainerStarted","Data":"e6492e0d691203d7dfcf234e0a203a30615c1b4ec3ec8ac8e13a45d2ce80a382"} Dec 08 21:22:12 crc kubenswrapper[4791]: I1208 21:22:12.322492 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-588d5457d6-f5kjf" event={"ID":"53126634-8c90-4fa2-9d8f-738a08fd8831","Type":"ContainerStarted","Data":"3f1a9474cd96b1666c447d58cc32f35d95b5acb0299e42774f847a3948a72b8a"} Dec 08 21:22:13 crc kubenswrapper[4791]: I1208 21:22:13.334941 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-588d5457d6-f5kjf" event={"ID":"53126634-8c90-4fa2-9d8f-738a08fd8831","Type":"ContainerStarted","Data":"9dcb51fea87f5d2ddb7d2ee678e7646e828c2a8923d841469e39685cc4539137"} Dec 08 21:22:13 crc kubenswrapper[4791]: I1208 21:22:13.335260 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-588d5457d6-f5kjf" Dec 08 21:22:13 crc kubenswrapper[4791]: I1208 21:22:13.338523 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6dcc9cf7cd-cr28v" event={"ID":"3f96561d-3c31-4257-b391-8e02d1ec5a4c","Type":"ContainerStarted","Data":"12eb54ff6e0c47fdcb316cf5ec82b2f634e443ccfd0e3878b9d09af0a7a29f75"} Dec 08 21:22:13 crc kubenswrapper[4791]: I1208 21:22:13.338859 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6dcc9cf7cd-cr28v" Dec 08 21:22:13 crc kubenswrapper[4791]: I1208 21:22:13.339773 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-588d5457d6-f5kjf" Dec 08 21:22:13 crc kubenswrapper[4791]: I1208 21:22:13.343958 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6dcc9cf7cd-cr28v" Dec 08 21:22:13 crc kubenswrapper[4791]: 
I1208 21:22:13.352747 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-588d5457d6-f5kjf" podStartSLOduration=4.352721654 podStartE2EDuration="4.352721654s" podCreationTimestamp="2025-12-08 21:22:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:22:13.35099027 +0000 UTC m=+210.049748625" watchObservedRunningTime="2025-12-08 21:22:13.352721654 +0000 UTC m=+210.051479999" Dec 08 21:22:13 crc kubenswrapper[4791]: I1208 21:22:13.428726 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6dcc9cf7cd-cr28v" podStartSLOduration=4.428673322 podStartE2EDuration="4.428673322s" podCreationTimestamp="2025-12-08 21:22:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:22:13.413414527 +0000 UTC m=+210.112172882" watchObservedRunningTime="2025-12-08 21:22:13.428673322 +0000 UTC m=+210.127431687" Dec 08 21:22:22 crc kubenswrapper[4791]: I1208 21:22:22.979222 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-4j746"] Dec 08 21:22:22 crc kubenswrapper[4791]: I1208 21:22:22.980793 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-4j746" Dec 08 21:22:22 crc kubenswrapper[4791]: I1208 21:22:22.986698 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-4j746"] Dec 08 21:22:23 crc kubenswrapper[4791]: I1208 21:22:23.097256 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/db0baa1d-fc68-4f9a-a66d-d5d8f9cf584b-registry-tls\") pod \"image-registry-66df7c8f76-4j746\" (UID: \"db0baa1d-fc68-4f9a-a66d-d5d8f9cf584b\") " pod="openshift-image-registry/image-registry-66df7c8f76-4j746" Dec 08 21:22:23 crc kubenswrapper[4791]: I1208 21:22:23.097865 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-4j746\" (UID: \"db0baa1d-fc68-4f9a-a66d-d5d8f9cf584b\") " pod="openshift-image-registry/image-registry-66df7c8f76-4j746" Dec 08 21:22:23 crc kubenswrapper[4791]: I1208 21:22:23.097896 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vzrxv\" (UniqueName: \"kubernetes.io/projected/db0baa1d-fc68-4f9a-a66d-d5d8f9cf584b-kube-api-access-vzrxv\") pod \"image-registry-66df7c8f76-4j746\" (UID: \"db0baa1d-fc68-4f9a-a66d-d5d8f9cf584b\") " pod="openshift-image-registry/image-registry-66df7c8f76-4j746" Dec 08 21:22:23 crc kubenswrapper[4791]: I1208 21:22:23.097937 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/db0baa1d-fc68-4f9a-a66d-d5d8f9cf584b-bound-sa-token\") pod \"image-registry-66df7c8f76-4j746\" (UID: \"db0baa1d-fc68-4f9a-a66d-d5d8f9cf584b\") " pod="openshift-image-registry/image-registry-66df7c8f76-4j746" Dec 08 21:22:23 crc kubenswrapper[4791]: I1208 21:22:23.097958 4791 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/db0baa1d-fc68-4f9a-a66d-d5d8f9cf584b-ca-trust-extracted\") pod \"image-registry-66df7c8f76-4j746\" (UID: \"db0baa1d-fc68-4f9a-a66d-d5d8f9cf584b\") " pod="openshift-image-registry/image-registry-66df7c8f76-4j746" Dec 08 21:22:23 crc kubenswrapper[4791]: I1208 21:22:23.097979 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/db0baa1d-fc68-4f9a-a66d-d5d8f9cf584b-installation-pull-secrets\") pod \"image-registry-66df7c8f76-4j746\" (UID: \"db0baa1d-fc68-4f9a-a66d-d5d8f9cf584b\") " pod="openshift-image-registry/image-registry-66df7c8f76-4j746" Dec 08 21:22:23 crc kubenswrapper[4791]: I1208 21:22:23.097998 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/db0baa1d-fc68-4f9a-a66d-d5d8f9cf584b-registry-certificates\") pod \"image-registry-66df7c8f76-4j746\" (UID: \"db0baa1d-fc68-4f9a-a66d-d5d8f9cf584b\") " pod="openshift-image-registry/image-registry-66df7c8f76-4j746" Dec 08 21:22:23 crc kubenswrapper[4791]: I1208 21:22:23.098024 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/db0baa1d-fc68-4f9a-a66d-d5d8f9cf584b-trusted-ca\") pod \"image-registry-66df7c8f76-4j746\" (UID: \"db0baa1d-fc68-4f9a-a66d-d5d8f9cf584b\") " pod="openshift-image-registry/image-registry-66df7c8f76-4j746" Dec 08 21:22:23 crc kubenswrapper[4791]: I1208 21:22:23.121793 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-4j746\" (UID: \"db0baa1d-fc68-4f9a-a66d-d5d8f9cf584b\") " pod="openshift-image-registry/image-registry-66df7c8f76-4j746" Dec 08 21:22:23 crc kubenswrapper[4791]: I1208 21:22:23.199345 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/db0baa1d-fc68-4f9a-a66d-d5d8f9cf584b-trusted-ca\") pod \"image-registry-66df7c8f76-4j746\" (UID: \"db0baa1d-fc68-4f9a-a66d-d5d8f9cf584b\") " pod="openshift-image-registry/image-registry-66df7c8f76-4j746" Dec 08 21:22:23 crc kubenswrapper[4791]: I1208 21:22:23.199405 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/db0baa1d-fc68-4f9a-a66d-d5d8f9cf584b-registry-tls\") pod \"image-registry-66df7c8f76-4j746\" (UID: \"db0baa1d-fc68-4f9a-a66d-d5d8f9cf584b\") " pod="openshift-image-registry/image-registry-66df7c8f76-4j746" Dec 08 21:22:23 crc kubenswrapper[4791]: I1208 21:22:23.199450 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vzrxv\" (UniqueName: \"kubernetes.io/projected/db0baa1d-fc68-4f9a-a66d-d5d8f9cf584b-kube-api-access-vzrxv\") pod \"image-registry-66df7c8f76-4j746\" (UID: \"db0baa1d-fc68-4f9a-a66d-d5d8f9cf584b\") " pod="openshift-image-registry/image-registry-66df7c8f76-4j746" Dec 08 21:22:23 crc kubenswrapper[4791]: I1208 21:22:23.199483 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/db0baa1d-fc68-4f9a-a66d-d5d8f9cf584b-bound-sa-token\") pod \"image-registry-66df7c8f76-4j746\" (UID: \"db0baa1d-fc68-4f9a-a66d-d5d8f9cf584b\") " pod="openshift-image-registry/image-registry-66df7c8f76-4j746" Dec 08 21:22:23 crc kubenswrapper[4791]: I1208 21:22:23.199502 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/db0baa1d-fc68-4f9a-a66d-d5d8f9cf584b-ca-trust-extracted\") pod \"image-registry-66df7c8f76-4j746\" (UID: \"db0baa1d-fc68-4f9a-a66d-d5d8f9cf584b\") " pod="openshift-image-registry/image-registry-66df7c8f76-4j746" Dec 08 21:22:23 crc kubenswrapper[4791]: I1208 21:22:23.199518 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/db0baa1d-fc68-4f9a-a66d-d5d8f9cf584b-registry-certificates\") pod \"image-registry-66df7c8f76-4j746\" (UID: \"db0baa1d-fc68-4f9a-a66d-d5d8f9cf584b\") " pod="openshift-image-registry/image-registry-66df7c8f76-4j746" Dec 08 21:22:23 crc kubenswrapper[4791]: I1208 21:22:23.199535 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/db0baa1d-fc68-4f9a-a66d-d5d8f9cf584b-installation-pull-secrets\") pod \"image-registry-66df7c8f76-4j746\" (UID: \"db0baa1d-fc68-4f9a-a66d-d5d8f9cf584b\") " pod="openshift-image-registry/image-registry-66df7c8f76-4j746" Dec 08 21:22:23 crc kubenswrapper[4791]: I1208 21:22:23.200404 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/db0baa1d-fc68-4f9a-a66d-d5d8f9cf584b-ca-trust-extracted\") pod \"image-registry-66df7c8f76-4j746\" (UID: \"db0baa1d-fc68-4f9a-a66d-d5d8f9cf584b\") " pod="openshift-image-registry/image-registry-66df7c8f76-4j746" Dec 08 21:22:23 crc kubenswrapper[4791]: I1208 21:22:23.201142 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/db0baa1d-fc68-4f9a-a66d-d5d8f9cf584b-trusted-ca\") pod \"image-registry-66df7c8f76-4j746\" (UID: \"db0baa1d-fc68-4f9a-a66d-d5d8f9cf584b\") " pod="openshift-image-registry/image-registry-66df7c8f76-4j746" Dec 08 21:22:23 crc kubenswrapper[4791]: I1208 21:22:23.201380 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/db0baa1d-fc68-4f9a-a66d-d5d8f9cf584b-registry-certificates\") pod \"image-registry-66df7c8f76-4j746\" (UID: \"db0baa1d-fc68-4f9a-a66d-d5d8f9cf584b\") " pod="openshift-image-registry/image-registry-66df7c8f76-4j746" Dec 08 21:22:23 crc kubenswrapper[4791]: I1208 21:22:23.207056 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/db0baa1d-fc68-4f9a-a66d-d5d8f9cf584b-registry-tls\") pod \"image-registry-66df7c8f76-4j746\" (UID: \"db0baa1d-fc68-4f9a-a66d-d5d8f9cf584b\") " pod="openshift-image-registry/image-registry-66df7c8f76-4j746" Dec 08 21:22:23 crc kubenswrapper[4791]: I1208 21:22:23.208107 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/db0baa1d-fc68-4f9a-a66d-d5d8f9cf584b-installation-pull-secrets\") pod \"image-registry-66df7c8f76-4j746\" (UID: \"db0baa1d-fc68-4f9a-a66d-d5d8f9cf584b\") " pod="openshift-image-registry/image-registry-66df7c8f76-4j746" Dec 08 
21:22:23 crc kubenswrapper[4791]: I1208 21:22:23.214993 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/db0baa1d-fc68-4f9a-a66d-d5d8f9cf584b-bound-sa-token\") pod \"image-registry-66df7c8f76-4j746\" (UID: \"db0baa1d-fc68-4f9a-a66d-d5d8f9cf584b\") " pod="openshift-image-registry/image-registry-66df7c8f76-4j746" Dec 08 21:22:23 crc kubenswrapper[4791]: I1208 21:22:23.217067 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vzrxv\" (UniqueName: \"kubernetes.io/projected/db0baa1d-fc68-4f9a-a66d-d5d8f9cf584b-kube-api-access-vzrxv\") pod \"image-registry-66df7c8f76-4j746\" (UID: \"db0baa1d-fc68-4f9a-a66d-d5d8f9cf584b\") " pod="openshift-image-registry/image-registry-66df7c8f76-4j746" Dec 08 21:22:23 crc kubenswrapper[4791]: I1208 21:22:23.299750 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-4j746" Dec 08 21:22:23 crc kubenswrapper[4791]: I1208 21:22:23.764140 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-4j746"] Dec 08 21:22:24 crc kubenswrapper[4791]: I1208 21:22:24.438688 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-4j746" event={"ID":"db0baa1d-fc68-4f9a-a66d-d5d8f9cf584b","Type":"ContainerStarted","Data":"371397f45e1d02d7f293a71d7f87614087ca057a479e053b1082aeecc0e89152"} Dec 08 21:22:24 crc kubenswrapper[4791]: I1208 21:22:24.439164 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-4j746" Dec 08 21:22:24 crc kubenswrapper[4791]: I1208 21:22:24.439179 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-4j746" event={"ID":"db0baa1d-fc68-4f9a-a66d-d5d8f9cf584b","Type":"ContainerStarted","Data":"9c443a1126d5398e93e6794b96201242fc52a8f7d3985edfa9a850084898b4b0"} Dec 08 21:22:24 crc kubenswrapper[4791]: I1208 21:22:24.458555 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-4j746" podStartSLOduration=2.458535259 podStartE2EDuration="2.458535259s" podCreationTimestamp="2025-12-08 21:22:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:22:24.454948899 +0000 UTC m=+221.153707254" watchObservedRunningTime="2025-12-08 21:22:24.458535259 +0000 UTC m=+221.157293604" Dec 08 21:22:43 crc kubenswrapper[4791]: I1208 21:22:43.306031 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-4j746" Dec 08 21:22:43 crc kubenswrapper[4791]: I1208 21:22:43.366753 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-lk2tp"] Dec 08 21:23:01 crc kubenswrapper[4791]: I1208 21:23:01.705171 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-588d5457d6-f5kjf"] Dec 08 21:23:01 crc kubenswrapper[4791]: I1208 21:23:01.705981 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-588d5457d6-f5kjf" podUID="53126634-8c90-4fa2-9d8f-738a08fd8831" containerName="controller-manager" 
containerID="cri-o://9dcb51fea87f5d2ddb7d2ee678e7646e828c2a8923d841469e39685cc4539137" gracePeriod=30 Dec 08 21:23:01 crc kubenswrapper[4791]: I1208 21:23:01.872155 4791 patch_prober.go:28] interesting pod/controller-manager-588d5457d6-f5kjf container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.61:8443/healthz\": dial tcp 10.217.0.61:8443: connect: connection refused" start-of-body= Dec 08 21:23:01 crc kubenswrapper[4791]: I1208 21:23:01.872242 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-588d5457d6-f5kjf" podUID="53126634-8c90-4fa2-9d8f-738a08fd8831" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.61:8443/healthz\": dial tcp 10.217.0.61:8443: connect: connection refused" Dec 08 21:23:02 crc kubenswrapper[4791]: I1208 21:23:02.073620 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-588d5457d6-f5kjf" Dec 08 21:23:02 crc kubenswrapper[4791]: I1208 21:23:02.156573 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/53126634-8c90-4fa2-9d8f-738a08fd8831-proxy-ca-bundles\") pod \"53126634-8c90-4fa2-9d8f-738a08fd8831\" (UID: \"53126634-8c90-4fa2-9d8f-738a08fd8831\") " Dec 08 21:23:02 crc kubenswrapper[4791]: I1208 21:23:02.156809 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/53126634-8c90-4fa2-9d8f-738a08fd8831-client-ca\") pod \"53126634-8c90-4fa2-9d8f-738a08fd8831\" (UID: \"53126634-8c90-4fa2-9d8f-738a08fd8831\") " Dec 08 21:23:02 crc kubenswrapper[4791]: I1208 21:23:02.156834 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/53126634-8c90-4fa2-9d8f-738a08fd8831-serving-cert\") pod \"53126634-8c90-4fa2-9d8f-738a08fd8831\" (UID: \"53126634-8c90-4fa2-9d8f-738a08fd8831\") " Dec 08 21:23:02 crc kubenswrapper[4791]: I1208 21:23:02.156889 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/53126634-8c90-4fa2-9d8f-738a08fd8831-config\") pod \"53126634-8c90-4fa2-9d8f-738a08fd8831\" (UID: \"53126634-8c90-4fa2-9d8f-738a08fd8831\") " Dec 08 21:23:02 crc kubenswrapper[4791]: I1208 21:23:02.156918 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8rzfb\" (UniqueName: \"kubernetes.io/projected/53126634-8c90-4fa2-9d8f-738a08fd8831-kube-api-access-8rzfb\") pod \"53126634-8c90-4fa2-9d8f-738a08fd8831\" (UID: \"53126634-8c90-4fa2-9d8f-738a08fd8831\") " Dec 08 21:23:02 crc kubenswrapper[4791]: I1208 21:23:02.157558 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/53126634-8c90-4fa2-9d8f-738a08fd8831-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "53126634-8c90-4fa2-9d8f-738a08fd8831" (UID: "53126634-8c90-4fa2-9d8f-738a08fd8831"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:23:02 crc kubenswrapper[4791]: I1208 21:23:02.157579 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/53126634-8c90-4fa2-9d8f-738a08fd8831-client-ca" (OuterVolumeSpecName: "client-ca") pod "53126634-8c90-4fa2-9d8f-738a08fd8831" (UID: "53126634-8c90-4fa2-9d8f-738a08fd8831"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:23:02 crc kubenswrapper[4791]: I1208 21:23:02.157667 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/53126634-8c90-4fa2-9d8f-738a08fd8831-config" (OuterVolumeSpecName: "config") pod "53126634-8c90-4fa2-9d8f-738a08fd8831" (UID: "53126634-8c90-4fa2-9d8f-738a08fd8831"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:23:02 crc kubenswrapper[4791]: I1208 21:23:02.158030 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/53126634-8c90-4fa2-9d8f-738a08fd8831-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:23:02 crc kubenswrapper[4791]: I1208 21:23:02.158052 4791 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/53126634-8c90-4fa2-9d8f-738a08fd8831-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 08 21:23:02 crc kubenswrapper[4791]: I1208 21:23:02.158066 4791 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/53126634-8c90-4fa2-9d8f-738a08fd8831-client-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:23:02 crc kubenswrapper[4791]: I1208 21:23:02.162324 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53126634-8c90-4fa2-9d8f-738a08fd8831-kube-api-access-8rzfb" (OuterVolumeSpecName: "kube-api-access-8rzfb") pod "53126634-8c90-4fa2-9d8f-738a08fd8831" (UID: "53126634-8c90-4fa2-9d8f-738a08fd8831"). InnerVolumeSpecName "kube-api-access-8rzfb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:23:02 crc kubenswrapper[4791]: I1208 21:23:02.162509 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53126634-8c90-4fa2-9d8f-738a08fd8831-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "53126634-8c90-4fa2-9d8f-738a08fd8831" (UID: "53126634-8c90-4fa2-9d8f-738a08fd8831"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:23:02 crc kubenswrapper[4791]: I1208 21:23:02.259868 4791 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/53126634-8c90-4fa2-9d8f-738a08fd8831-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:23:02 crc kubenswrapper[4791]: I1208 21:23:02.259916 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8rzfb\" (UniqueName: \"kubernetes.io/projected/53126634-8c90-4fa2-9d8f-738a08fd8831-kube-api-access-8rzfb\") on node \"crc\" DevicePath \"\"" Dec 08 21:23:02 crc kubenswrapper[4791]: I1208 21:23:02.656567 4791 generic.go:334] "Generic (PLEG): container finished" podID="53126634-8c90-4fa2-9d8f-738a08fd8831" containerID="9dcb51fea87f5d2ddb7d2ee678e7646e828c2a8923d841469e39685cc4539137" exitCode=0 Dec 08 21:23:02 crc kubenswrapper[4791]: I1208 21:23:02.656630 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-588d5457d6-f5kjf" event={"ID":"53126634-8c90-4fa2-9d8f-738a08fd8831","Type":"ContainerDied","Data":"9dcb51fea87f5d2ddb7d2ee678e7646e828c2a8923d841469e39685cc4539137"} Dec 08 21:23:02 crc kubenswrapper[4791]: I1208 21:23:02.656673 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-588d5457d6-f5kjf" Dec 08 21:23:02 crc kubenswrapper[4791]: I1208 21:23:02.656698 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-588d5457d6-f5kjf" event={"ID":"53126634-8c90-4fa2-9d8f-738a08fd8831","Type":"ContainerDied","Data":"3f1a9474cd96b1666c447d58cc32f35d95b5acb0299e42774f847a3948a72b8a"} Dec 08 21:23:02 crc kubenswrapper[4791]: I1208 21:23:02.656745 4791 scope.go:117] "RemoveContainer" containerID="9dcb51fea87f5d2ddb7d2ee678e7646e828c2a8923d841469e39685cc4539137" Dec 08 21:23:02 crc kubenswrapper[4791]: I1208 21:23:02.672929 4791 scope.go:117] "RemoveContainer" containerID="9dcb51fea87f5d2ddb7d2ee678e7646e828c2a8923d841469e39685cc4539137" Dec 08 21:23:02 crc kubenswrapper[4791]: E1208 21:23:02.673324 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9dcb51fea87f5d2ddb7d2ee678e7646e828c2a8923d841469e39685cc4539137\": container with ID starting with 9dcb51fea87f5d2ddb7d2ee678e7646e828c2a8923d841469e39685cc4539137 not found: ID does not exist" containerID="9dcb51fea87f5d2ddb7d2ee678e7646e828c2a8923d841469e39685cc4539137" Dec 08 21:23:02 crc kubenswrapper[4791]: I1208 21:23:02.673409 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9dcb51fea87f5d2ddb7d2ee678e7646e828c2a8923d841469e39685cc4539137"} err="failed to get container status \"9dcb51fea87f5d2ddb7d2ee678e7646e828c2a8923d841469e39685cc4539137\": rpc error: code = NotFound desc = could not find container \"9dcb51fea87f5d2ddb7d2ee678e7646e828c2a8923d841469e39685cc4539137\": container with ID starting with 9dcb51fea87f5d2ddb7d2ee678e7646e828c2a8923d841469e39685cc4539137 not found: ID does not exist" Dec 08 21:23:02 crc kubenswrapper[4791]: I1208 21:23:02.687511 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-588d5457d6-f5kjf"] Dec 08 21:23:02 crc kubenswrapper[4791]: I1208 21:23:02.690806 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-588d5457d6-f5kjf"] Dec 08 
21:23:03 crc kubenswrapper[4791]: I1208 21:23:03.554019 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-865db64698-c5x72"] Dec 08 21:23:03 crc kubenswrapper[4791]: E1208 21:23:03.554740 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53126634-8c90-4fa2-9d8f-738a08fd8831" containerName="controller-manager" Dec 08 21:23:03 crc kubenswrapper[4791]: I1208 21:23:03.554753 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="53126634-8c90-4fa2-9d8f-738a08fd8831" containerName="controller-manager" Dec 08 21:23:03 crc kubenswrapper[4791]: I1208 21:23:03.554862 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="53126634-8c90-4fa2-9d8f-738a08fd8831" containerName="controller-manager" Dec 08 21:23:03 crc kubenswrapper[4791]: I1208 21:23:03.555275 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-865db64698-c5x72" Dec 08 21:23:03 crc kubenswrapper[4791]: I1208 21:23:03.561416 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 08 21:23:03 crc kubenswrapper[4791]: I1208 21:23:03.561532 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 08 21:23:03 crc kubenswrapper[4791]: I1208 21:23:03.561881 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 08 21:23:03 crc kubenswrapper[4791]: I1208 21:23:03.562085 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 08 21:23:03 crc kubenswrapper[4791]: I1208 21:23:03.562287 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 08 21:23:03 crc kubenswrapper[4791]: I1208 21:23:03.562345 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 08 21:23:03 crc kubenswrapper[4791]: I1208 21:23:03.565936 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-865db64698-c5x72"] Dec 08 21:23:03 crc kubenswrapper[4791]: I1208 21:23:03.568235 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 08 21:23:03 crc kubenswrapper[4791]: I1208 21:23:03.576670 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ssct6\" (UniqueName: \"kubernetes.io/projected/c8a75422-db1c-4a94-a794-68cb5bc73f26-kube-api-access-ssct6\") pod \"controller-manager-865db64698-c5x72\" (UID: \"c8a75422-db1c-4a94-a794-68cb5bc73f26\") " pod="openshift-controller-manager/controller-manager-865db64698-c5x72" Dec 08 21:23:03 crc kubenswrapper[4791]: I1208 21:23:03.576754 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c8a75422-db1c-4a94-a794-68cb5bc73f26-client-ca\") pod \"controller-manager-865db64698-c5x72\" (UID: \"c8a75422-db1c-4a94-a794-68cb5bc73f26\") " pod="openshift-controller-manager/controller-manager-865db64698-c5x72" Dec 08 21:23:03 crc kubenswrapper[4791]: I1208 21:23:03.576807 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/c8a75422-db1c-4a94-a794-68cb5bc73f26-config\") pod \"controller-manager-865db64698-c5x72\" (UID: \"c8a75422-db1c-4a94-a794-68cb5bc73f26\") " pod="openshift-controller-manager/controller-manager-865db64698-c5x72" Dec 08 21:23:03 crc kubenswrapper[4791]: I1208 21:23:03.576887 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c8a75422-db1c-4a94-a794-68cb5bc73f26-proxy-ca-bundles\") pod \"controller-manager-865db64698-c5x72\" (UID: \"c8a75422-db1c-4a94-a794-68cb5bc73f26\") " pod="openshift-controller-manager/controller-manager-865db64698-c5x72" Dec 08 21:23:03 crc kubenswrapper[4791]: I1208 21:23:03.576925 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c8a75422-db1c-4a94-a794-68cb5bc73f26-serving-cert\") pod \"controller-manager-865db64698-c5x72\" (UID: \"c8a75422-db1c-4a94-a794-68cb5bc73f26\") " pod="openshift-controller-manager/controller-manager-865db64698-c5x72" Dec 08 21:23:03 crc kubenswrapper[4791]: I1208 21:23:03.606846 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="53126634-8c90-4fa2-9d8f-738a08fd8831" path="/var/lib/kubelet/pods/53126634-8c90-4fa2-9d8f-738a08fd8831/volumes" Dec 08 21:23:03 crc kubenswrapper[4791]: I1208 21:23:03.678209 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ssct6\" (UniqueName: \"kubernetes.io/projected/c8a75422-db1c-4a94-a794-68cb5bc73f26-kube-api-access-ssct6\") pod \"controller-manager-865db64698-c5x72\" (UID: \"c8a75422-db1c-4a94-a794-68cb5bc73f26\") " pod="openshift-controller-manager/controller-manager-865db64698-c5x72" Dec 08 21:23:03 crc kubenswrapper[4791]: I1208 21:23:03.678254 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c8a75422-db1c-4a94-a794-68cb5bc73f26-client-ca\") pod \"controller-manager-865db64698-c5x72\" (UID: \"c8a75422-db1c-4a94-a794-68cb5bc73f26\") " pod="openshift-controller-manager/controller-manager-865db64698-c5x72" Dec 08 21:23:03 crc kubenswrapper[4791]: I1208 21:23:03.678290 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c8a75422-db1c-4a94-a794-68cb5bc73f26-config\") pod \"controller-manager-865db64698-c5x72\" (UID: \"c8a75422-db1c-4a94-a794-68cb5bc73f26\") " pod="openshift-controller-manager/controller-manager-865db64698-c5x72" Dec 08 21:23:03 crc kubenswrapper[4791]: I1208 21:23:03.678539 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c8a75422-db1c-4a94-a794-68cb5bc73f26-proxy-ca-bundles\") pod \"controller-manager-865db64698-c5x72\" (UID: \"c8a75422-db1c-4a94-a794-68cb5bc73f26\") " pod="openshift-controller-manager/controller-manager-865db64698-c5x72" Dec 08 21:23:03 crc kubenswrapper[4791]: I1208 21:23:03.678588 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c8a75422-db1c-4a94-a794-68cb5bc73f26-serving-cert\") pod \"controller-manager-865db64698-c5x72\" (UID: \"c8a75422-db1c-4a94-a794-68cb5bc73f26\") " pod="openshift-controller-manager/controller-manager-865db64698-c5x72" Dec 08 21:23:03 crc kubenswrapper[4791]: I1208 21:23:03.680725 4791 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c8a75422-db1c-4a94-a794-68cb5bc73f26-config\") pod \"controller-manager-865db64698-c5x72\" (UID: \"c8a75422-db1c-4a94-a794-68cb5bc73f26\") " pod="openshift-controller-manager/controller-manager-865db64698-c5x72" Dec 08 21:23:03 crc kubenswrapper[4791]: I1208 21:23:03.681762 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c8a75422-db1c-4a94-a794-68cb5bc73f26-client-ca\") pod \"controller-manager-865db64698-c5x72\" (UID: \"c8a75422-db1c-4a94-a794-68cb5bc73f26\") " pod="openshift-controller-manager/controller-manager-865db64698-c5x72" Dec 08 21:23:03 crc kubenswrapper[4791]: I1208 21:23:03.682472 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c8a75422-db1c-4a94-a794-68cb5bc73f26-proxy-ca-bundles\") pod \"controller-manager-865db64698-c5x72\" (UID: \"c8a75422-db1c-4a94-a794-68cb5bc73f26\") " pod="openshift-controller-manager/controller-manager-865db64698-c5x72" Dec 08 21:23:03 crc kubenswrapper[4791]: I1208 21:23:03.686824 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c8a75422-db1c-4a94-a794-68cb5bc73f26-serving-cert\") pod \"controller-manager-865db64698-c5x72\" (UID: \"c8a75422-db1c-4a94-a794-68cb5bc73f26\") " pod="openshift-controller-manager/controller-manager-865db64698-c5x72" Dec 08 21:23:03 crc kubenswrapper[4791]: I1208 21:23:03.697408 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ssct6\" (UniqueName: \"kubernetes.io/projected/c8a75422-db1c-4a94-a794-68cb5bc73f26-kube-api-access-ssct6\") pod \"controller-manager-865db64698-c5x72\" (UID: \"c8a75422-db1c-4a94-a794-68cb5bc73f26\") " pod="openshift-controller-manager/controller-manager-865db64698-c5x72" Dec 08 21:23:03 crc kubenswrapper[4791]: I1208 21:23:03.871342 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-865db64698-c5x72" Dec 08 21:23:04 crc kubenswrapper[4791]: I1208 21:23:04.075767 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-865db64698-c5x72"] Dec 08 21:23:04 crc kubenswrapper[4791]: I1208 21:23:04.685170 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-865db64698-c5x72" event={"ID":"c8a75422-db1c-4a94-a794-68cb5bc73f26","Type":"ContainerStarted","Data":"717557ad5c745d27f19f61d6079f10163ac28bac202288536b26f83ccbcb46f6"} Dec 08 21:23:04 crc kubenswrapper[4791]: I1208 21:23:04.685225 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-865db64698-c5x72" event={"ID":"c8a75422-db1c-4a94-a794-68cb5bc73f26","Type":"ContainerStarted","Data":"dfd8af9e13bfa68edcba4132f224281eb7f736b2ab52d54c6a613538a18435e3"} Dec 08 21:23:04 crc kubenswrapper[4791]: I1208 21:23:04.685583 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-865db64698-c5x72" Dec 08 21:23:04 crc kubenswrapper[4791]: I1208 21:23:04.707924 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-865db64698-c5x72" podStartSLOduration=3.707896568 podStartE2EDuration="3.707896568s" podCreationTimestamp="2025-12-08 21:23:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:23:04.705790764 +0000 UTC m=+261.404549109" watchObservedRunningTime="2025-12-08 21:23:04.707896568 +0000 UTC m=+261.406654913" Dec 08 21:23:04 crc kubenswrapper[4791]: I1208 21:23:04.920243 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-865db64698-c5x72" Dec 08 21:23:08 crc kubenswrapper[4791]: I1208 21:23:08.416896 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" podUID="097c1e37-dbfb-4e31-9c4d-561c6bed9933" containerName="registry" containerID="cri-o://794aa42a619242e946a94d06aeaa582e9b8bb74e8b45ea5a8753b040f252cee4" gracePeriod=30 Dec 08 21:23:08 crc kubenswrapper[4791]: I1208 21:23:08.706831 4791 generic.go:334] "Generic (PLEG): container finished" podID="097c1e37-dbfb-4e31-9c4d-561c6bed9933" containerID="794aa42a619242e946a94d06aeaa582e9b8bb74e8b45ea5a8753b040f252cee4" exitCode=0 Dec 08 21:23:08 crc kubenswrapper[4791]: I1208 21:23:08.706886 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" event={"ID":"097c1e37-dbfb-4e31-9c4d-561c6bed9933","Type":"ContainerDied","Data":"794aa42a619242e946a94d06aeaa582e9b8bb74e8b45ea5a8753b040f252cee4"} Dec 08 21:23:09 crc kubenswrapper[4791]: I1208 21:23:09.103627 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:23:09 crc kubenswrapper[4791]: I1208 21:23:09.171138 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/097c1e37-dbfb-4e31-9c4d-561c6bed9933-registry-certificates\") pod \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " Dec 08 21:23:09 crc kubenswrapper[4791]: I1208 21:23:09.171203 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/097c1e37-dbfb-4e31-9c4d-561c6bed9933-installation-pull-secrets\") pod \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " Dec 08 21:23:09 crc kubenswrapper[4791]: I1208 21:23:09.171262 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/097c1e37-dbfb-4e31-9c4d-561c6bed9933-trusted-ca\") pod \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " Dec 08 21:23:09 crc kubenswrapper[4791]: I1208 21:23:09.171294 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/097c1e37-dbfb-4e31-9c4d-561c6bed9933-ca-trust-extracted\") pod \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " Dec 08 21:23:09 crc kubenswrapper[4791]: I1208 21:23:09.171553 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " Dec 08 21:23:09 crc kubenswrapper[4791]: I1208 21:23:09.171649 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/097c1e37-dbfb-4e31-9c4d-561c6bed9933-registry-tls\") pod \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " Dec 08 21:23:09 crc kubenswrapper[4791]: I1208 21:23:09.171674 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/097c1e37-dbfb-4e31-9c4d-561c6bed9933-bound-sa-token\") pod \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " Dec 08 21:23:09 crc kubenswrapper[4791]: I1208 21:23:09.171748 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z7278\" (UniqueName: \"kubernetes.io/projected/097c1e37-dbfb-4e31-9c4d-561c6bed9933-kube-api-access-z7278\") pod \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\" (UID: \"097c1e37-dbfb-4e31-9c4d-561c6bed9933\") " Dec 08 21:23:09 crc kubenswrapper[4791]: I1208 21:23:09.172510 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/097c1e37-dbfb-4e31-9c4d-561c6bed9933-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "097c1e37-dbfb-4e31-9c4d-561c6bed9933" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:23:09 crc kubenswrapper[4791]: I1208 21:23:09.172624 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/097c1e37-dbfb-4e31-9c4d-561c6bed9933-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "097c1e37-dbfb-4e31-9c4d-561c6bed9933" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:23:09 crc kubenswrapper[4791]: I1208 21:23:09.199017 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/097c1e37-dbfb-4e31-9c4d-561c6bed9933-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "097c1e37-dbfb-4e31-9c4d-561c6bed9933" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:23:09 crc kubenswrapper[4791]: I1208 21:23:09.199519 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/097c1e37-dbfb-4e31-9c4d-561c6bed9933-kube-api-access-z7278" (OuterVolumeSpecName: "kube-api-access-z7278") pod "097c1e37-dbfb-4e31-9c4d-561c6bed9933" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933"). InnerVolumeSpecName "kube-api-access-z7278". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:23:09 crc kubenswrapper[4791]: I1208 21:23:09.200638 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/097c1e37-dbfb-4e31-9c4d-561c6bed9933-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "097c1e37-dbfb-4e31-9c4d-561c6bed9933" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:23:09 crc kubenswrapper[4791]: I1208 21:23:09.202179 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "097c1e37-dbfb-4e31-9c4d-561c6bed9933" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 08 21:23:09 crc kubenswrapper[4791]: I1208 21:23:09.205088 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/097c1e37-dbfb-4e31-9c4d-561c6bed9933-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "097c1e37-dbfb-4e31-9c4d-561c6bed9933" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:23:09 crc kubenswrapper[4791]: I1208 21:23:09.205878 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/097c1e37-dbfb-4e31-9c4d-561c6bed9933-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "097c1e37-dbfb-4e31-9c4d-561c6bed9933" (UID: "097c1e37-dbfb-4e31-9c4d-561c6bed9933"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:23:09 crc kubenswrapper[4791]: I1208 21:23:09.273373 4791 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/097c1e37-dbfb-4e31-9c4d-561c6bed9933-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 08 21:23:09 crc kubenswrapper[4791]: I1208 21:23:09.273413 4791 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/097c1e37-dbfb-4e31-9c4d-561c6bed9933-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 08 21:23:09 crc kubenswrapper[4791]: I1208 21:23:09.273424 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z7278\" (UniqueName: \"kubernetes.io/projected/097c1e37-dbfb-4e31-9c4d-561c6bed9933-kube-api-access-z7278\") on node \"crc\" DevicePath \"\"" Dec 08 21:23:09 crc kubenswrapper[4791]: I1208 21:23:09.273435 4791 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/097c1e37-dbfb-4e31-9c4d-561c6bed9933-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 08 21:23:09 crc kubenswrapper[4791]: I1208 21:23:09.273448 4791 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/097c1e37-dbfb-4e31-9c4d-561c6bed9933-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 08 21:23:09 crc kubenswrapper[4791]: I1208 21:23:09.273455 4791 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/097c1e37-dbfb-4e31-9c4d-561c6bed9933-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:23:09 crc kubenswrapper[4791]: I1208 21:23:09.273463 4791 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/097c1e37-dbfb-4e31-9c4d-561c6bed9933-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 08 21:23:09 crc kubenswrapper[4791]: I1208 21:23:09.712992 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" event={"ID":"097c1e37-dbfb-4e31-9c4d-561c6bed9933","Type":"ContainerDied","Data":"747841df8457cb7eced4df60ab2d49c1535f36fda66f0b2ec841619c761e1415"} Dec 08 21:23:09 crc kubenswrapper[4791]: I1208 21:23:09.713055 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-lk2tp" Dec 08 21:23:09 crc kubenswrapper[4791]: I1208 21:23:09.713064 4791 scope.go:117] "RemoveContainer" containerID="794aa42a619242e946a94d06aeaa582e9b8bb74e8b45ea5a8753b040f252cee4" Dec 08 21:23:09 crc kubenswrapper[4791]: I1208 21:23:09.733681 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-lk2tp"] Dec 08 21:23:09 crc kubenswrapper[4791]: I1208 21:23:09.738762 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-lk2tp"] Dec 08 21:23:11 crc kubenswrapper[4791]: I1208 21:23:11.605091 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="097c1e37-dbfb-4e31-9c4d-561c6bed9933" path="/var/lib/kubelet/pods/097c1e37-dbfb-4e31-9c4d-561c6bed9933/volumes" Dec 08 21:23:25 crc kubenswrapper[4791]: I1208 21:23:25.381320 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mbbd4"] Dec 08 21:23:25 crc kubenswrapper[4791]: I1208 21:23:25.382406 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-mbbd4" podUID="175a64fd-0187-4d28-87f1-76194cac1bf2" containerName="registry-server" containerID="cri-o://c58f081ecf66fdacb92330b44f7c9641e832576bc3c29cebfe72777ed49582e7" gracePeriod=30 Dec 08 21:23:25 crc kubenswrapper[4791]: I1208 21:23:25.394332 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-npdcj"] Dec 08 21:23:25 crc kubenswrapper[4791]: I1208 21:23:25.394808 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-npdcj" podUID="0decf941-a6e4-485f-afd4-7972d332952a" containerName="registry-server" containerID="cri-o://b13f0a62f14238b27f87c6bbc7ea4f8b347d0e1f448030f2c478f7fa08d7d7a0" gracePeriod=30 Dec 08 21:23:25 crc kubenswrapper[4791]: I1208 21:23:25.400299 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-w9zwt"] Dec 08 21:23:25 crc kubenswrapper[4791]: I1208 21:23:25.400676 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-w9zwt" podUID="409596b6-1fa0-416d-b5a3-a06c2e36c15b" containerName="marketplace-operator" containerID="cri-o://ae34794ab6b94f309fcdc6e544ada2683cf7c3f329649c2ab87638491e5e8df5" gracePeriod=30 Dec 08 21:23:25 crc kubenswrapper[4791]: I1208 21:23:25.404830 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ck6br"] Dec 08 21:23:25 crc kubenswrapper[4791]: I1208 21:23:25.405115 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-ck6br" podUID="e3a99522-0cda-4894-8d9a-bc8aaa7763e3" containerName="registry-server" containerID="cri-o://f37ff2d8945007128451f0ea660ea36b550c924d6985bd3540e6b54d26059ef2" gracePeriod=30 Dec 08 21:23:25 crc kubenswrapper[4791]: I1208 21:23:25.420159 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-g287b"] Dec 08 21:23:25 crc kubenswrapper[4791]: I1208 21:23:25.420476 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-g287b" podUID="f4fae2f4-952d-43c7-b7a2-55c898273973" containerName="registry-server" 
containerID="cri-o://0c45b6c3589cbf6c00821b2a723921ccdf269c08d677ab5f1e48ce6bed5e36b1" gracePeriod=30 Dec 08 21:23:25 crc kubenswrapper[4791]: I1208 21:23:25.426141 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-5qfdd"] Dec 08 21:23:25 crc kubenswrapper[4791]: E1208 21:23:25.426489 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="097c1e37-dbfb-4e31-9c4d-561c6bed9933" containerName="registry" Dec 08 21:23:25 crc kubenswrapper[4791]: I1208 21:23:25.426584 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="097c1e37-dbfb-4e31-9c4d-561c6bed9933" containerName="registry" Dec 08 21:23:25 crc kubenswrapper[4791]: I1208 21:23:25.426892 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="097c1e37-dbfb-4e31-9c4d-561c6bed9933" containerName="registry" Dec 08 21:23:25 crc kubenswrapper[4791]: I1208 21:23:25.427486 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-5qfdd" Dec 08 21:23:25 crc kubenswrapper[4791]: I1208 21:23:25.436996 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-5qfdd"] Dec 08 21:23:25 crc kubenswrapper[4791]: I1208 21:23:25.608106 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9fc25b7b-1c62-4d70-b75d-c2f5a82b60fe-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-5qfdd\" (UID: \"9fc25b7b-1c62-4d70-b75d-c2f5a82b60fe\") " pod="openshift-marketplace/marketplace-operator-79b997595-5qfdd" Dec 08 21:23:25 crc kubenswrapper[4791]: I1208 21:23:25.608579 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9fc25b7b-1c62-4d70-b75d-c2f5a82b60fe-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-5qfdd\" (UID: \"9fc25b7b-1c62-4d70-b75d-c2f5a82b60fe\") " pod="openshift-marketplace/marketplace-operator-79b997595-5qfdd" Dec 08 21:23:25 crc kubenswrapper[4791]: I1208 21:23:25.608646 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ntsgg\" (UniqueName: \"kubernetes.io/projected/9fc25b7b-1c62-4d70-b75d-c2f5a82b60fe-kube-api-access-ntsgg\") pod \"marketplace-operator-79b997595-5qfdd\" (UID: \"9fc25b7b-1c62-4d70-b75d-c2f5a82b60fe\") " pod="openshift-marketplace/marketplace-operator-79b997595-5qfdd" Dec 08 21:23:25 crc kubenswrapper[4791]: I1208 21:23:25.709353 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ntsgg\" (UniqueName: \"kubernetes.io/projected/9fc25b7b-1c62-4d70-b75d-c2f5a82b60fe-kube-api-access-ntsgg\") pod \"marketplace-operator-79b997595-5qfdd\" (UID: \"9fc25b7b-1c62-4d70-b75d-c2f5a82b60fe\") " pod="openshift-marketplace/marketplace-operator-79b997595-5qfdd" Dec 08 21:23:25 crc kubenswrapper[4791]: I1208 21:23:25.709448 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9fc25b7b-1c62-4d70-b75d-c2f5a82b60fe-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-5qfdd\" (UID: \"9fc25b7b-1c62-4d70-b75d-c2f5a82b60fe\") " pod="openshift-marketplace/marketplace-operator-79b997595-5qfdd" Dec 08 21:23:25 crc kubenswrapper[4791]: I1208 21:23:25.709497 4791 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9fc25b7b-1c62-4d70-b75d-c2f5a82b60fe-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-5qfdd\" (UID: \"9fc25b7b-1c62-4d70-b75d-c2f5a82b60fe\") " pod="openshift-marketplace/marketplace-operator-79b997595-5qfdd" Dec 08 21:23:25 crc kubenswrapper[4791]: I1208 21:23:25.710831 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9fc25b7b-1c62-4d70-b75d-c2f5a82b60fe-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-5qfdd\" (UID: \"9fc25b7b-1c62-4d70-b75d-c2f5a82b60fe\") " pod="openshift-marketplace/marketplace-operator-79b997595-5qfdd" Dec 08 21:23:25 crc kubenswrapper[4791]: I1208 21:23:25.717166 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9fc25b7b-1c62-4d70-b75d-c2f5a82b60fe-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-5qfdd\" (UID: \"9fc25b7b-1c62-4d70-b75d-c2f5a82b60fe\") " pod="openshift-marketplace/marketplace-operator-79b997595-5qfdd" Dec 08 21:23:25 crc kubenswrapper[4791]: I1208 21:23:25.728841 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ntsgg\" (UniqueName: \"kubernetes.io/projected/9fc25b7b-1c62-4d70-b75d-c2f5a82b60fe-kube-api-access-ntsgg\") pod \"marketplace-operator-79b997595-5qfdd\" (UID: \"9fc25b7b-1c62-4d70-b75d-c2f5a82b60fe\") " pod="openshift-marketplace/marketplace-operator-79b997595-5qfdd" Dec 08 21:23:25 crc kubenswrapper[4791]: I1208 21:23:25.752204 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-5qfdd" Dec 08 21:23:25 crc kubenswrapper[4791]: I1208 21:23:25.818341 4791 generic.go:334] "Generic (PLEG): container finished" podID="0decf941-a6e4-485f-afd4-7972d332952a" containerID="b13f0a62f14238b27f87c6bbc7ea4f8b347d0e1f448030f2c478f7fa08d7d7a0" exitCode=0 Dec 08 21:23:25 crc kubenswrapper[4791]: I1208 21:23:25.818454 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-npdcj" event={"ID":"0decf941-a6e4-485f-afd4-7972d332952a","Type":"ContainerDied","Data":"b13f0a62f14238b27f87c6bbc7ea4f8b347d0e1f448030f2c478f7fa08d7d7a0"} Dec 08 21:23:25 crc kubenswrapper[4791]: I1208 21:23:25.821351 4791 generic.go:334] "Generic (PLEG): container finished" podID="f4fae2f4-952d-43c7-b7a2-55c898273973" containerID="0c45b6c3589cbf6c00821b2a723921ccdf269c08d677ab5f1e48ce6bed5e36b1" exitCode=0 Dec 08 21:23:25 crc kubenswrapper[4791]: I1208 21:23:25.821438 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g287b" event={"ID":"f4fae2f4-952d-43c7-b7a2-55c898273973","Type":"ContainerDied","Data":"0c45b6c3589cbf6c00821b2a723921ccdf269c08d677ab5f1e48ce6bed5e36b1"} Dec 08 21:23:25 crc kubenswrapper[4791]: I1208 21:23:25.823129 4791 generic.go:334] "Generic (PLEG): container finished" podID="409596b6-1fa0-416d-b5a3-a06c2e36c15b" containerID="ae34794ab6b94f309fcdc6e544ada2683cf7c3f329649c2ab87638491e5e8df5" exitCode=0 Dec 08 21:23:25 crc kubenswrapper[4791]: I1208 21:23:25.823177 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-w9zwt" 
event={"ID":"409596b6-1fa0-416d-b5a3-a06c2e36c15b","Type":"ContainerDied","Data":"ae34794ab6b94f309fcdc6e544ada2683cf7c3f329649c2ab87638491e5e8df5"} Dec 08 21:23:25 crc kubenswrapper[4791]: I1208 21:23:25.823209 4791 scope.go:117] "RemoveContainer" containerID="69f994b6dfedf1324c092e64d4ce5841538a24467cc84da9b5b608f55afdc42c" Dec 08 21:23:25 crc kubenswrapper[4791]: I1208 21:23:25.825737 4791 generic.go:334] "Generic (PLEG): container finished" podID="175a64fd-0187-4d28-87f1-76194cac1bf2" containerID="c58f081ecf66fdacb92330b44f7c9641e832576bc3c29cebfe72777ed49582e7" exitCode=0 Dec 08 21:23:25 crc kubenswrapper[4791]: I1208 21:23:25.825859 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mbbd4" event={"ID":"175a64fd-0187-4d28-87f1-76194cac1bf2","Type":"ContainerDied","Data":"c58f081ecf66fdacb92330b44f7c9641e832576bc3c29cebfe72777ed49582e7"} Dec 08 21:23:25 crc kubenswrapper[4791]: I1208 21:23:25.827797 4791 generic.go:334] "Generic (PLEG): container finished" podID="e3a99522-0cda-4894-8d9a-bc8aaa7763e3" containerID="f37ff2d8945007128451f0ea660ea36b550c924d6985bd3540e6b54d26059ef2" exitCode=0 Dec 08 21:23:25 crc kubenswrapper[4791]: I1208 21:23:25.827902 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ck6br" event={"ID":"e3a99522-0cda-4894-8d9a-bc8aaa7763e3","Type":"ContainerDied","Data":"f37ff2d8945007128451f0ea660ea36b550c924d6985bd3540e6b54d26059ef2"} Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.118588 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mbbd4" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.127567 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-npdcj" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.216236 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0decf941-a6e4-485f-afd4-7972d332952a-utilities\") pod \"0decf941-a6e4-485f-afd4-7972d332952a\" (UID: \"0decf941-a6e4-485f-afd4-7972d332952a\") " Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.216314 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0decf941-a6e4-485f-afd4-7972d332952a-catalog-content\") pod \"0decf941-a6e4-485f-afd4-7972d332952a\" (UID: \"0decf941-a6e4-485f-afd4-7972d332952a\") " Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.216364 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dls9d\" (UniqueName: \"kubernetes.io/projected/175a64fd-0187-4d28-87f1-76194cac1bf2-kube-api-access-dls9d\") pod \"175a64fd-0187-4d28-87f1-76194cac1bf2\" (UID: \"175a64fd-0187-4d28-87f1-76194cac1bf2\") " Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.216467 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qcngh\" (UniqueName: \"kubernetes.io/projected/0decf941-a6e4-485f-afd4-7972d332952a-kube-api-access-qcngh\") pod \"0decf941-a6e4-485f-afd4-7972d332952a\" (UID: \"0decf941-a6e4-485f-afd4-7972d332952a\") " Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.216487 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/175a64fd-0187-4d28-87f1-76194cac1bf2-catalog-content\") pod \"175a64fd-0187-4d28-87f1-76194cac1bf2\" (UID: \"175a64fd-0187-4d28-87f1-76194cac1bf2\") " Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.216508 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/175a64fd-0187-4d28-87f1-76194cac1bf2-utilities\") pod \"175a64fd-0187-4d28-87f1-76194cac1bf2\" (UID: \"175a64fd-0187-4d28-87f1-76194cac1bf2\") " Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.217580 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/175a64fd-0187-4d28-87f1-76194cac1bf2-utilities" (OuterVolumeSpecName: "utilities") pod "175a64fd-0187-4d28-87f1-76194cac1bf2" (UID: "175a64fd-0187-4d28-87f1-76194cac1bf2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.217659 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0decf941-a6e4-485f-afd4-7972d332952a-utilities" (OuterVolumeSpecName: "utilities") pod "0decf941-a6e4-485f-afd4-7972d332952a" (UID: "0decf941-a6e4-485f-afd4-7972d332952a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.224128 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/175a64fd-0187-4d28-87f1-76194cac1bf2-kube-api-access-dls9d" (OuterVolumeSpecName: "kube-api-access-dls9d") pod "175a64fd-0187-4d28-87f1-76194cac1bf2" (UID: "175a64fd-0187-4d28-87f1-76194cac1bf2"). InnerVolumeSpecName "kube-api-access-dls9d". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.224416 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0decf941-a6e4-485f-afd4-7972d332952a-kube-api-access-qcngh" (OuterVolumeSpecName: "kube-api-access-qcngh") pod "0decf941-a6e4-485f-afd4-7972d332952a" (UID: "0decf941-a6e4-485f-afd4-7972d332952a"). InnerVolumeSpecName "kube-api-access-qcngh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.282296 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0decf941-a6e4-485f-afd4-7972d332952a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0decf941-a6e4-485f-afd4-7972d332952a" (UID: "0decf941-a6e4-485f-afd4-7972d332952a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.290020 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/175a64fd-0187-4d28-87f1-76194cac1bf2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "175a64fd-0187-4d28-87f1-76194cac1bf2" (UID: "175a64fd-0187-4d28-87f1-76194cac1bf2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.317976 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0decf941-a6e4-485f-afd4-7972d332952a-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.318011 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0decf941-a6e4-485f-afd4-7972d332952a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.318025 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dls9d\" (UniqueName: \"kubernetes.io/projected/175a64fd-0187-4d28-87f1-76194cac1bf2-kube-api-access-dls9d\") on node \"crc\" DevicePath \"\"" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.318037 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qcngh\" (UniqueName: \"kubernetes.io/projected/0decf941-a6e4-485f-afd4-7972d332952a-kube-api-access-qcngh\") on node \"crc\" DevicePath \"\"" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.318047 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/175a64fd-0187-4d28-87f1-76194cac1bf2-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.318055 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/175a64fd-0187-4d28-87f1-76194cac1bf2-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.330573 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-w9zwt" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.338045 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ck6br" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.357319 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-g287b" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.421175 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3a99522-0cda-4894-8d9a-bc8aaa7763e3-catalog-content\") pod \"e3a99522-0cda-4894-8d9a-bc8aaa7763e3\" (UID: \"e3a99522-0cda-4894-8d9a-bc8aaa7763e3\") " Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.421253 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/409596b6-1fa0-416d-b5a3-a06c2e36c15b-marketplace-operator-metrics\") pod \"409596b6-1fa0-416d-b5a3-a06c2e36c15b\" (UID: \"409596b6-1fa0-416d-b5a3-a06c2e36c15b\") " Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.421507 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/409596b6-1fa0-416d-b5a3-a06c2e36c15b-marketplace-trusted-ca\") pod \"409596b6-1fa0-416d-b5a3-a06c2e36c15b\" (UID: \"409596b6-1fa0-416d-b5a3-a06c2e36c15b\") " Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.421631 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4jxjd\" (UniqueName: \"kubernetes.io/projected/409596b6-1fa0-416d-b5a3-a06c2e36c15b-kube-api-access-4jxjd\") pod \"409596b6-1fa0-416d-b5a3-a06c2e36c15b\" (UID: \"409596b6-1fa0-416d-b5a3-a06c2e36c15b\") " Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.421670 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3a99522-0cda-4894-8d9a-bc8aaa7763e3-utilities\") pod \"e3a99522-0cda-4894-8d9a-bc8aaa7763e3\" (UID: \"e3a99522-0cda-4894-8d9a-bc8aaa7763e3\") " Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.421693 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b8v54\" (UniqueName: \"kubernetes.io/projected/e3a99522-0cda-4894-8d9a-bc8aaa7763e3-kube-api-access-b8v54\") pod \"e3a99522-0cda-4894-8d9a-bc8aaa7763e3\" (UID: \"e3a99522-0cda-4894-8d9a-bc8aaa7763e3\") " Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.425865 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/409596b6-1fa0-416d-b5a3-a06c2e36c15b-kube-api-access-4jxjd" (OuterVolumeSpecName: "kube-api-access-4jxjd") pod "409596b6-1fa0-416d-b5a3-a06c2e36c15b" (UID: "409596b6-1fa0-416d-b5a3-a06c2e36c15b"). InnerVolumeSpecName "kube-api-access-4jxjd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.426773 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3a99522-0cda-4894-8d9a-bc8aaa7763e3-kube-api-access-b8v54" (OuterVolumeSpecName: "kube-api-access-b8v54") pod "e3a99522-0cda-4894-8d9a-bc8aaa7763e3" (UID: "e3a99522-0cda-4894-8d9a-bc8aaa7763e3"). InnerVolumeSpecName "kube-api-access-b8v54". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.426872 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/409596b6-1fa0-416d-b5a3-a06c2e36c15b-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "409596b6-1fa0-416d-b5a3-a06c2e36c15b" (UID: "409596b6-1fa0-416d-b5a3-a06c2e36c15b"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.427585 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e3a99522-0cda-4894-8d9a-bc8aaa7763e3-utilities" (OuterVolumeSpecName: "utilities") pod "e3a99522-0cda-4894-8d9a-bc8aaa7763e3" (UID: "e3a99522-0cda-4894-8d9a-bc8aaa7763e3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.428108 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/409596b6-1fa0-416d-b5a3-a06c2e36c15b-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "409596b6-1fa0-416d-b5a3-a06c2e36c15b" (UID: "409596b6-1fa0-416d-b5a3-a06c2e36c15b"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.440322 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-5qfdd"] Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.449316 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e3a99522-0cda-4894-8d9a-bc8aaa7763e3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e3a99522-0cda-4894-8d9a-bc8aaa7763e3" (UID: "e3a99522-0cda-4894-8d9a-bc8aaa7763e3"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.522654 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4fae2f4-952d-43c7-b7a2-55c898273973-catalog-content\") pod \"f4fae2f4-952d-43c7-b7a2-55c898273973\" (UID: \"f4fae2f4-952d-43c7-b7a2-55c898273973\") " Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.522832 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vg8vg\" (UniqueName: \"kubernetes.io/projected/f4fae2f4-952d-43c7-b7a2-55c898273973-kube-api-access-vg8vg\") pod \"f4fae2f4-952d-43c7-b7a2-55c898273973\" (UID: \"f4fae2f4-952d-43c7-b7a2-55c898273973\") " Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.522861 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4fae2f4-952d-43c7-b7a2-55c898273973-utilities\") pod \"f4fae2f4-952d-43c7-b7a2-55c898273973\" (UID: \"f4fae2f4-952d-43c7-b7a2-55c898273973\") " Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.523065 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4jxjd\" (UniqueName: \"kubernetes.io/projected/409596b6-1fa0-416d-b5a3-a06c2e36c15b-kube-api-access-4jxjd\") on node \"crc\" DevicePath \"\"" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.523083 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3a99522-0cda-4894-8d9a-bc8aaa7763e3-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.523093 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b8v54\" (UniqueName: \"kubernetes.io/projected/e3a99522-0cda-4894-8d9a-bc8aaa7763e3-kube-api-access-b8v54\") on node \"crc\" DevicePath \"\"" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.523102 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3a99522-0cda-4894-8d9a-bc8aaa7763e3-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.523114 4791 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/409596b6-1fa0-416d-b5a3-a06c2e36c15b-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.523123 4791 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/409596b6-1fa0-416d-b5a3-a06c2e36c15b-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.523722 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f4fae2f4-952d-43c7-b7a2-55c898273973-utilities" (OuterVolumeSpecName: "utilities") pod "f4fae2f4-952d-43c7-b7a2-55c898273973" (UID: "f4fae2f4-952d-43c7-b7a2-55c898273973"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.527454 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4fae2f4-952d-43c7-b7a2-55c898273973-kube-api-access-vg8vg" (OuterVolumeSpecName: "kube-api-access-vg8vg") pod "f4fae2f4-952d-43c7-b7a2-55c898273973" (UID: "f4fae2f4-952d-43c7-b7a2-55c898273973"). InnerVolumeSpecName "kube-api-access-vg8vg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.624028 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vg8vg\" (UniqueName: \"kubernetes.io/projected/f4fae2f4-952d-43c7-b7a2-55c898273973-kube-api-access-vg8vg\") on node \"crc\" DevicePath \"\"" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.624350 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4fae2f4-952d-43c7-b7a2-55c898273973-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.665232 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f4fae2f4-952d-43c7-b7a2-55c898273973-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f4fae2f4-952d-43c7-b7a2-55c898273973" (UID: "f4fae2f4-952d-43c7-b7a2-55c898273973"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.726143 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4fae2f4-952d-43c7-b7a2-55c898273973-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.835310 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g287b" event={"ID":"f4fae2f4-952d-43c7-b7a2-55c898273973","Type":"ContainerDied","Data":"e45df30a5f4ecfaa1c617dc6bf010b56b23d054c91c993b064b67a7b613f61de"} Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.835407 4791 scope.go:117] "RemoveContainer" containerID="0c45b6c3589cbf6c00821b2a723921ccdf269c08d677ab5f1e48ce6bed5e36b1" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.835338 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-g287b" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.837229 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-w9zwt" event={"ID":"409596b6-1fa0-416d-b5a3-a06c2e36c15b","Type":"ContainerDied","Data":"5aad25904232fdaaa877cc4f3b26c511e4dc7dfdcdc136eea6062a8250593b45"} Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.837326 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-w9zwt" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.843146 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mbbd4" event={"ID":"175a64fd-0187-4d28-87f1-76194cac1bf2","Type":"ContainerDied","Data":"90c5f19fe2c420ed1a1ffbe28bdf56c9cf6ebd4197ae67d5aa706248f924c1c5"} Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.843286 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mbbd4" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.853038 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ck6br" event={"ID":"e3a99522-0cda-4894-8d9a-bc8aaa7763e3","Type":"ContainerDied","Data":"b74f0ce49c959597060eb64a19e9913d57d1a1938c4a75753c46c773ed3f719b"} Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.853128 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ck6br" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.856287 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-5qfdd" event={"ID":"9fc25b7b-1c62-4d70-b75d-c2f5a82b60fe","Type":"ContainerStarted","Data":"b9c17f6f9bc1be2bb6784562811401c9596ea62f81e010ca234c7b6367fc9e57"} Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.856323 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-5qfdd" event={"ID":"9fc25b7b-1c62-4d70-b75d-c2f5a82b60fe","Type":"ContainerStarted","Data":"b9a9c4d12c0cc00c4f8f22ce536ea18e88305a0eef248f3e7382949194e5676c"} Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.857467 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-5qfdd" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.858022 4791 scope.go:117] "RemoveContainer" containerID="057716cd859d2fecd94372f1c30e482ce3e006bc4ff39a095314f9782b81253d" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.859451 4791 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-5qfdd container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.64:8080/healthz\": dial tcp 10.217.0.64:8080: connect: connection refused" start-of-body= Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.859496 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-5qfdd" podUID="9fc25b7b-1c62-4d70-b75d-c2f5a82b60fe" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.64:8080/healthz\": dial tcp 10.217.0.64:8080: connect: connection refused" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.862567 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-npdcj" event={"ID":"0decf941-a6e4-485f-afd4-7972d332952a","Type":"ContainerDied","Data":"74baff70fede653d5c0b8a44fba98e179b1e2f8262c5ffc82b6504d8a88b4086"} Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.862671 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-npdcj" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.874418 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-g287b"] Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.878775 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-g287b"] Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.889867 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-5qfdd" podStartSLOduration=1.889844801 podStartE2EDuration="1.889844801s" podCreationTimestamp="2025-12-08 21:23:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:23:26.889661796 +0000 UTC m=+283.588420151" watchObservedRunningTime="2025-12-08 21:23:26.889844801 +0000 UTC m=+283.588603146" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.909127 4791 scope.go:117] "RemoveContainer" containerID="f358533d044f599cab97b460fad5940e9c0f64873e44dc3248ff8e5552ce968b" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.915668 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-w9zwt"] Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.924140 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-w9zwt"] Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.925534 4791 scope.go:117] "RemoveContainer" containerID="ae34794ab6b94f309fcdc6e544ada2683cf7c3f329649c2ab87638491e5e8df5" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.935776 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mbbd4"] Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.942913 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-mbbd4"] Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.962755 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-npdcj"] Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.967378 4791 scope.go:117] "RemoveContainer" containerID="c58f081ecf66fdacb92330b44f7c9641e832576bc3c29cebfe72777ed49582e7" Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.970210 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-npdcj"] Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.974669 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ck6br"] Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.977753 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-ck6br"] Dec 08 21:23:26 crc kubenswrapper[4791]: I1208 21:23:26.988787 4791 scope.go:117] "RemoveContainer" containerID="92c1f96e0fab36b85069cb648bc43c934e0984c62caba0396b8244061fb7a923" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.009079 4791 scope.go:117] "RemoveContainer" containerID="9356ac4bc6716c458253c8ab544897a7b7e1405f240e7744b2b65540420edbbc" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.028264 4791 scope.go:117] "RemoveContainer" containerID="f37ff2d8945007128451f0ea660ea36b550c924d6985bd3540e6b54d26059ef2" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.043500 4791 
scope.go:117] "RemoveContainer" containerID="6386fac06704b56cce18f7e93445d8e7a59feb94966e3e6dfeb1bf52d9d13435" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.064130 4791 scope.go:117] "RemoveContainer" containerID="41c0878d385be8f173879303f5a73c46b9774cfb9fb2da3f6e3710f0cf29448c" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.077381 4791 scope.go:117] "RemoveContainer" containerID="b13f0a62f14238b27f87c6bbc7ea4f8b347d0e1f448030f2c478f7fa08d7d7a0" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.097968 4791 scope.go:117] "RemoveContainer" containerID="5b08c9d8990e49d9a7082c8f1372527c08e0452ad30a7aeac03cba5a05d5d863" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.119247 4791 scope.go:117] "RemoveContainer" containerID="fa6d6768491dba1ca6277230431287d3f7f5aa712e2f4983b82d1dab48a2facd" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.606149 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0decf941-a6e4-485f-afd4-7972d332952a" path="/var/lib/kubelet/pods/0decf941-a6e4-485f-afd4-7972d332952a/volumes" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.606910 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="175a64fd-0187-4d28-87f1-76194cac1bf2" path="/var/lib/kubelet/pods/175a64fd-0187-4d28-87f1-76194cac1bf2/volumes" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.607543 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="409596b6-1fa0-416d-b5a3-a06c2e36c15b" path="/var/lib/kubelet/pods/409596b6-1fa0-416d-b5a3-a06c2e36c15b/volumes" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.608456 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3a99522-0cda-4894-8d9a-bc8aaa7763e3" path="/var/lib/kubelet/pods/e3a99522-0cda-4894-8d9a-bc8aaa7763e3/volumes" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.609071 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4fae2f4-952d-43c7-b7a2-55c898273973" path="/var/lib/kubelet/pods/f4fae2f4-952d-43c7-b7a2-55c898273973/volumes" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.610035 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-sqxkn"] Dec 08 21:23:27 crc kubenswrapper[4791]: E1208 21:23:27.610248 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3a99522-0cda-4894-8d9a-bc8aaa7763e3" containerName="registry-server" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.610267 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3a99522-0cda-4894-8d9a-bc8aaa7763e3" containerName="registry-server" Dec 08 21:23:27 crc kubenswrapper[4791]: E1208 21:23:27.610277 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4fae2f4-952d-43c7-b7a2-55c898273973" containerName="extract-content" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.610283 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4fae2f4-952d-43c7-b7a2-55c898273973" containerName="extract-content" Dec 08 21:23:27 crc kubenswrapper[4791]: E1208 21:23:27.610294 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="409596b6-1fa0-416d-b5a3-a06c2e36c15b" containerName="marketplace-operator" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.610302 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="409596b6-1fa0-416d-b5a3-a06c2e36c15b" containerName="marketplace-operator" Dec 08 21:23:27 crc kubenswrapper[4791]: E1208 21:23:27.610311 4791 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="0decf941-a6e4-485f-afd4-7972d332952a" containerName="extract-utilities" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.610316 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="0decf941-a6e4-485f-afd4-7972d332952a" containerName="extract-utilities" Dec 08 21:23:27 crc kubenswrapper[4791]: E1208 21:23:27.610325 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="175a64fd-0187-4d28-87f1-76194cac1bf2" containerName="extract-utilities" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.610332 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="175a64fd-0187-4d28-87f1-76194cac1bf2" containerName="extract-utilities" Dec 08 21:23:27 crc kubenswrapper[4791]: E1208 21:23:27.610340 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="175a64fd-0187-4d28-87f1-76194cac1bf2" containerName="registry-server" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.610346 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="175a64fd-0187-4d28-87f1-76194cac1bf2" containerName="registry-server" Dec 08 21:23:27 crc kubenswrapper[4791]: E1208 21:23:27.610356 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4fae2f4-952d-43c7-b7a2-55c898273973" containerName="extract-utilities" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.610362 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4fae2f4-952d-43c7-b7a2-55c898273973" containerName="extract-utilities" Dec 08 21:23:27 crc kubenswrapper[4791]: E1208 21:23:27.610373 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="175a64fd-0187-4d28-87f1-76194cac1bf2" containerName="extract-content" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.610379 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="175a64fd-0187-4d28-87f1-76194cac1bf2" containerName="extract-content" Dec 08 21:23:27 crc kubenswrapper[4791]: E1208 21:23:27.610385 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3a99522-0cda-4894-8d9a-bc8aaa7763e3" containerName="extract-utilities" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.610391 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3a99522-0cda-4894-8d9a-bc8aaa7763e3" containerName="extract-utilities" Dec 08 21:23:27 crc kubenswrapper[4791]: E1208 21:23:27.610398 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3a99522-0cda-4894-8d9a-bc8aaa7763e3" containerName="extract-content" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.610405 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3a99522-0cda-4894-8d9a-bc8aaa7763e3" containerName="extract-content" Dec 08 21:23:27 crc kubenswrapper[4791]: E1208 21:23:27.610412 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0decf941-a6e4-485f-afd4-7972d332952a" containerName="registry-server" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.610417 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="0decf941-a6e4-485f-afd4-7972d332952a" containerName="registry-server" Dec 08 21:23:27 crc kubenswrapper[4791]: E1208 21:23:27.610428 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0decf941-a6e4-485f-afd4-7972d332952a" containerName="extract-content" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.610435 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="0decf941-a6e4-485f-afd4-7972d332952a" containerName="extract-content" Dec 08 21:23:27 crc kubenswrapper[4791]: E1208 21:23:27.610443 4791 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4fae2f4-952d-43c7-b7a2-55c898273973" containerName="registry-server" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.610449 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4fae2f4-952d-43c7-b7a2-55c898273973" containerName="registry-server" Dec 08 21:23:27 crc kubenswrapper[4791]: E1208 21:23:27.610458 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="409596b6-1fa0-416d-b5a3-a06c2e36c15b" containerName="marketplace-operator" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.610465 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="409596b6-1fa0-416d-b5a3-a06c2e36c15b" containerName="marketplace-operator" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.610567 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="175a64fd-0187-4d28-87f1-76194cac1bf2" containerName="registry-server" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.610585 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4fae2f4-952d-43c7-b7a2-55c898273973" containerName="registry-server" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.610593 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="0decf941-a6e4-485f-afd4-7972d332952a" containerName="registry-server" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.610600 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="409596b6-1fa0-416d-b5a3-a06c2e36c15b" containerName="marketplace-operator" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.610610 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3a99522-0cda-4894-8d9a-bc8aaa7763e3" containerName="registry-server" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.610622 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="409596b6-1fa0-416d-b5a3-a06c2e36c15b" containerName="marketplace-operator" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.611449 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-sqxkn"] Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.611545 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sqxkn" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.616915 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.741618 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n9t5b\" (UniqueName: \"kubernetes.io/projected/441a316f-5101-41ba-ac80-0065189657da-kube-api-access-n9t5b\") pod \"redhat-marketplace-sqxkn\" (UID: \"441a316f-5101-41ba-ac80-0065189657da\") " pod="openshift-marketplace/redhat-marketplace-sqxkn" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.742138 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/441a316f-5101-41ba-ac80-0065189657da-catalog-content\") pod \"redhat-marketplace-sqxkn\" (UID: \"441a316f-5101-41ba-ac80-0065189657da\") " pod="openshift-marketplace/redhat-marketplace-sqxkn" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.742189 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/441a316f-5101-41ba-ac80-0065189657da-utilities\") pod \"redhat-marketplace-sqxkn\" (UID: \"441a316f-5101-41ba-ac80-0065189657da\") " pod="openshift-marketplace/redhat-marketplace-sqxkn" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.803573 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-hv7kc"] Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.810057 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hv7kc"] Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.810186 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hv7kc" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.814662 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.843611 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/441a316f-5101-41ba-ac80-0065189657da-catalog-content\") pod \"redhat-marketplace-sqxkn\" (UID: \"441a316f-5101-41ba-ac80-0065189657da\") " pod="openshift-marketplace/redhat-marketplace-sqxkn" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.843728 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/441a316f-5101-41ba-ac80-0065189657da-utilities\") pod \"redhat-marketplace-sqxkn\" (UID: \"441a316f-5101-41ba-ac80-0065189657da\") " pod="openshift-marketplace/redhat-marketplace-sqxkn" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.843785 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n9t5b\" (UniqueName: \"kubernetes.io/projected/441a316f-5101-41ba-ac80-0065189657da-kube-api-access-n9t5b\") pod \"redhat-marketplace-sqxkn\" (UID: \"441a316f-5101-41ba-ac80-0065189657da\") " pod="openshift-marketplace/redhat-marketplace-sqxkn" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.844158 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/441a316f-5101-41ba-ac80-0065189657da-catalog-content\") pod \"redhat-marketplace-sqxkn\" (UID: \"441a316f-5101-41ba-ac80-0065189657da\") " pod="openshift-marketplace/redhat-marketplace-sqxkn" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.844335 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/441a316f-5101-41ba-ac80-0065189657da-utilities\") pod \"redhat-marketplace-sqxkn\" (UID: \"441a316f-5101-41ba-ac80-0065189657da\") " pod="openshift-marketplace/redhat-marketplace-sqxkn" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.869363 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n9t5b\" (UniqueName: \"kubernetes.io/projected/441a316f-5101-41ba-ac80-0065189657da-kube-api-access-n9t5b\") pod \"redhat-marketplace-sqxkn\" (UID: \"441a316f-5101-41ba-ac80-0065189657da\") " pod="openshift-marketplace/redhat-marketplace-sqxkn" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.881491 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-5qfdd" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.928233 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sqxkn" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.945313 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vfh4d\" (UniqueName: \"kubernetes.io/projected/5f13498f-b751-484e-bfd1-8ea09222f482-kube-api-access-vfh4d\") pod \"certified-operators-hv7kc\" (UID: \"5f13498f-b751-484e-bfd1-8ea09222f482\") " pod="openshift-marketplace/certified-operators-hv7kc" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.945538 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f13498f-b751-484e-bfd1-8ea09222f482-utilities\") pod \"certified-operators-hv7kc\" (UID: \"5f13498f-b751-484e-bfd1-8ea09222f482\") " pod="openshift-marketplace/certified-operators-hv7kc" Dec 08 21:23:27 crc kubenswrapper[4791]: I1208 21:23:27.945862 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f13498f-b751-484e-bfd1-8ea09222f482-catalog-content\") pod \"certified-operators-hv7kc\" (UID: \"5f13498f-b751-484e-bfd1-8ea09222f482\") " pod="openshift-marketplace/certified-operators-hv7kc" Dec 08 21:23:28 crc kubenswrapper[4791]: I1208 21:23:28.047851 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f13498f-b751-484e-bfd1-8ea09222f482-catalog-content\") pod \"certified-operators-hv7kc\" (UID: \"5f13498f-b751-484e-bfd1-8ea09222f482\") " pod="openshift-marketplace/certified-operators-hv7kc" Dec 08 21:23:28 crc kubenswrapper[4791]: I1208 21:23:28.048779 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vfh4d\" (UniqueName: \"kubernetes.io/projected/5f13498f-b751-484e-bfd1-8ea09222f482-kube-api-access-vfh4d\") pod \"certified-operators-hv7kc\" (UID: \"5f13498f-b751-484e-bfd1-8ea09222f482\") " pod="openshift-marketplace/certified-operators-hv7kc" Dec 08 21:23:28 crc kubenswrapper[4791]: I1208 21:23:28.048919 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f13498f-b751-484e-bfd1-8ea09222f482-catalog-content\") pod \"certified-operators-hv7kc\" (UID: \"5f13498f-b751-484e-bfd1-8ea09222f482\") " pod="openshift-marketplace/certified-operators-hv7kc" Dec 08 21:23:28 crc kubenswrapper[4791]: I1208 21:23:28.050425 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f13498f-b751-484e-bfd1-8ea09222f482-utilities\") pod \"certified-operators-hv7kc\" (UID: \"5f13498f-b751-484e-bfd1-8ea09222f482\") " pod="openshift-marketplace/certified-operators-hv7kc" Dec 08 21:23:28 crc kubenswrapper[4791]: I1208 21:23:28.051426 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f13498f-b751-484e-bfd1-8ea09222f482-utilities\") pod \"certified-operators-hv7kc\" (UID: \"5f13498f-b751-484e-bfd1-8ea09222f482\") " pod="openshift-marketplace/certified-operators-hv7kc" Dec 08 21:23:28 crc kubenswrapper[4791]: I1208 21:23:28.093614 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vfh4d\" (UniqueName: \"kubernetes.io/projected/5f13498f-b751-484e-bfd1-8ea09222f482-kube-api-access-vfh4d\") pod 
\"certified-operators-hv7kc\" (UID: \"5f13498f-b751-484e-bfd1-8ea09222f482\") " pod="openshift-marketplace/certified-operators-hv7kc" Dec 08 21:23:28 crc kubenswrapper[4791]: I1208 21:23:28.138031 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hv7kc" Dec 08 21:23:28 crc kubenswrapper[4791]: I1208 21:23:28.352978 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-sqxkn"] Dec 08 21:23:28 crc kubenswrapper[4791]: W1208 21:23:28.361205 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod441a316f_5101_41ba_ac80_0065189657da.slice/crio-1fd36e873a0158bdf4e5d276a09f4caac40da4be01db65b3f58fb605e352d5ed WatchSource:0}: Error finding container 1fd36e873a0158bdf4e5d276a09f4caac40da4be01db65b3f58fb605e352d5ed: Status 404 returned error can't find the container with id 1fd36e873a0158bdf4e5d276a09f4caac40da4be01db65b3f58fb605e352d5ed Dec 08 21:23:28 crc kubenswrapper[4791]: I1208 21:23:28.528404 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hv7kc"] Dec 08 21:23:28 crc kubenswrapper[4791]: W1208 21:23:28.541203 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5f13498f_b751_484e_bfd1_8ea09222f482.slice/crio-77790f0f558c386e7a0c2c13e5810c876f4df0d3b737fdf5648ee08462c76acd WatchSource:0}: Error finding container 77790f0f558c386e7a0c2c13e5810c876f4df0d3b737fdf5648ee08462c76acd: Status 404 returned error can't find the container with id 77790f0f558c386e7a0c2c13e5810c876f4df0d3b737fdf5648ee08462c76acd Dec 08 21:23:28 crc kubenswrapper[4791]: I1208 21:23:28.902356 4791 generic.go:334] "Generic (PLEG): container finished" podID="441a316f-5101-41ba-ac80-0065189657da" containerID="abf6b2300fa4e34bcb1fdd8e1b1c85fe5d43b6da8f81e8574c090b7158dbcbbf" exitCode=0 Dec 08 21:23:28 crc kubenswrapper[4791]: I1208 21:23:28.902439 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sqxkn" event={"ID":"441a316f-5101-41ba-ac80-0065189657da","Type":"ContainerDied","Data":"abf6b2300fa4e34bcb1fdd8e1b1c85fe5d43b6da8f81e8574c090b7158dbcbbf"} Dec 08 21:23:28 crc kubenswrapper[4791]: I1208 21:23:28.902475 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sqxkn" event={"ID":"441a316f-5101-41ba-ac80-0065189657da","Type":"ContainerStarted","Data":"1fd36e873a0158bdf4e5d276a09f4caac40da4be01db65b3f58fb605e352d5ed"} Dec 08 21:23:28 crc kubenswrapper[4791]: I1208 21:23:28.906263 4791 generic.go:334] "Generic (PLEG): container finished" podID="5f13498f-b751-484e-bfd1-8ea09222f482" containerID="13cba54449fb93ad0da3c0d93d5ab707b4971d48a64486e46b6ba1036aac106c" exitCode=0 Dec 08 21:23:28 crc kubenswrapper[4791]: I1208 21:23:28.907956 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hv7kc" event={"ID":"5f13498f-b751-484e-bfd1-8ea09222f482","Type":"ContainerDied","Data":"13cba54449fb93ad0da3c0d93d5ab707b4971d48a64486e46b6ba1036aac106c"} Dec 08 21:23:28 crc kubenswrapper[4791]: I1208 21:23:28.907998 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hv7kc" event={"ID":"5f13498f-b751-484e-bfd1-8ea09222f482","Type":"ContainerStarted","Data":"77790f0f558c386e7a0c2c13e5810c876f4df0d3b737fdf5648ee08462c76acd"} Dec 
08 21:23:29 crc kubenswrapper[4791]: I1208 21:23:29.914194 4791 generic.go:334] "Generic (PLEG): container finished" podID="441a316f-5101-41ba-ac80-0065189657da" containerID="ce9e382f6b426fbc8299c2eadb7873f9111af1592a54aefd569827770a95f903" exitCode=0 Dec 08 21:23:29 crc kubenswrapper[4791]: I1208 21:23:29.914404 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sqxkn" event={"ID":"441a316f-5101-41ba-ac80-0065189657da","Type":"ContainerDied","Data":"ce9e382f6b426fbc8299c2eadb7873f9111af1592a54aefd569827770a95f903"} Dec 08 21:23:29 crc kubenswrapper[4791]: I1208 21:23:29.916966 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hv7kc" event={"ID":"5f13498f-b751-484e-bfd1-8ea09222f482","Type":"ContainerStarted","Data":"13308b864af5eaddab524110c185d0f7e3e03872bd6166197438c0f467042a18"} Dec 08 21:23:30 crc kubenswrapper[4791]: I1208 21:23:30.001370 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-sv4g6"] Dec 08 21:23:30 crc kubenswrapper[4791]: I1208 21:23:30.003960 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sv4g6" Dec 08 21:23:30 crc kubenswrapper[4791]: I1208 21:23:30.005109 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-sv4g6"] Dec 08 21:23:30 crc kubenswrapper[4791]: I1208 21:23:30.006023 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 08 21:23:30 crc kubenswrapper[4791]: I1208 21:23:30.185203 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bff6eb2e-73f5-4c5c-8785-dca5aebf0619-catalog-content\") pod \"redhat-operators-sv4g6\" (UID: \"bff6eb2e-73f5-4c5c-8785-dca5aebf0619\") " pod="openshift-marketplace/redhat-operators-sv4g6" Dec 08 21:23:30 crc kubenswrapper[4791]: I1208 21:23:30.185276 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bff6eb2e-73f5-4c5c-8785-dca5aebf0619-utilities\") pod \"redhat-operators-sv4g6\" (UID: \"bff6eb2e-73f5-4c5c-8785-dca5aebf0619\") " pod="openshift-marketplace/redhat-operators-sv4g6" Dec 08 21:23:30 crc kubenswrapper[4791]: I1208 21:23:30.185421 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l477c\" (UniqueName: \"kubernetes.io/projected/bff6eb2e-73f5-4c5c-8785-dca5aebf0619-kube-api-access-l477c\") pod \"redhat-operators-sv4g6\" (UID: \"bff6eb2e-73f5-4c5c-8785-dca5aebf0619\") " pod="openshift-marketplace/redhat-operators-sv4g6" Dec 08 21:23:30 crc kubenswrapper[4791]: I1208 21:23:30.201212 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-l7wkb"] Dec 08 21:23:30 crc kubenswrapper[4791]: I1208 21:23:30.204489 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-l7wkb" Dec 08 21:23:30 crc kubenswrapper[4791]: I1208 21:23:30.216995 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 08 21:23:30 crc kubenswrapper[4791]: I1208 21:23:30.220358 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-l7wkb"] Dec 08 21:23:30 crc kubenswrapper[4791]: I1208 21:23:30.287424 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bff6eb2e-73f5-4c5c-8785-dca5aebf0619-utilities\") pod \"redhat-operators-sv4g6\" (UID: \"bff6eb2e-73f5-4c5c-8785-dca5aebf0619\") " pod="openshift-marketplace/redhat-operators-sv4g6" Dec 08 21:23:30 crc kubenswrapper[4791]: I1208 21:23:30.287483 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l477c\" (UniqueName: \"kubernetes.io/projected/bff6eb2e-73f5-4c5c-8785-dca5aebf0619-kube-api-access-l477c\") pod \"redhat-operators-sv4g6\" (UID: \"bff6eb2e-73f5-4c5c-8785-dca5aebf0619\") " pod="openshift-marketplace/redhat-operators-sv4g6" Dec 08 21:23:30 crc kubenswrapper[4791]: I1208 21:23:30.287576 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bff6eb2e-73f5-4c5c-8785-dca5aebf0619-catalog-content\") pod \"redhat-operators-sv4g6\" (UID: \"bff6eb2e-73f5-4c5c-8785-dca5aebf0619\") " pod="openshift-marketplace/redhat-operators-sv4g6" Dec 08 21:23:30 crc kubenswrapper[4791]: I1208 21:23:30.288120 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bff6eb2e-73f5-4c5c-8785-dca5aebf0619-catalog-content\") pod \"redhat-operators-sv4g6\" (UID: \"bff6eb2e-73f5-4c5c-8785-dca5aebf0619\") " pod="openshift-marketplace/redhat-operators-sv4g6" Dec 08 21:23:30 crc kubenswrapper[4791]: I1208 21:23:30.288183 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bff6eb2e-73f5-4c5c-8785-dca5aebf0619-utilities\") pod \"redhat-operators-sv4g6\" (UID: \"bff6eb2e-73f5-4c5c-8785-dca5aebf0619\") " pod="openshift-marketplace/redhat-operators-sv4g6" Dec 08 21:23:30 crc kubenswrapper[4791]: I1208 21:23:30.317305 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l477c\" (UniqueName: \"kubernetes.io/projected/bff6eb2e-73f5-4c5c-8785-dca5aebf0619-kube-api-access-l477c\") pod \"redhat-operators-sv4g6\" (UID: \"bff6eb2e-73f5-4c5c-8785-dca5aebf0619\") " pod="openshift-marketplace/redhat-operators-sv4g6" Dec 08 21:23:30 crc kubenswrapper[4791]: I1208 21:23:30.389106 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f60fb0b8-f530-4722-aacb-2d39fcf03ee2-utilities\") pod \"community-operators-l7wkb\" (UID: \"f60fb0b8-f530-4722-aacb-2d39fcf03ee2\") " pod="openshift-marketplace/community-operators-l7wkb" Dec 08 21:23:30 crc kubenswrapper[4791]: I1208 21:23:30.389431 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f60fb0b8-f530-4722-aacb-2d39fcf03ee2-catalog-content\") pod \"community-operators-l7wkb\" (UID: \"f60fb0b8-f530-4722-aacb-2d39fcf03ee2\") " 
pod="openshift-marketplace/community-operators-l7wkb" Dec 08 21:23:30 crc kubenswrapper[4791]: I1208 21:23:30.389516 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8x4gh\" (UniqueName: \"kubernetes.io/projected/f60fb0b8-f530-4722-aacb-2d39fcf03ee2-kube-api-access-8x4gh\") pod \"community-operators-l7wkb\" (UID: \"f60fb0b8-f530-4722-aacb-2d39fcf03ee2\") " pod="openshift-marketplace/community-operators-l7wkb" Dec 08 21:23:30 crc kubenswrapper[4791]: I1208 21:23:30.396436 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sv4g6" Dec 08 21:23:30 crc kubenswrapper[4791]: I1208 21:23:30.491057 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8x4gh\" (UniqueName: \"kubernetes.io/projected/f60fb0b8-f530-4722-aacb-2d39fcf03ee2-kube-api-access-8x4gh\") pod \"community-operators-l7wkb\" (UID: \"f60fb0b8-f530-4722-aacb-2d39fcf03ee2\") " pod="openshift-marketplace/community-operators-l7wkb" Dec 08 21:23:30 crc kubenswrapper[4791]: I1208 21:23:30.491122 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f60fb0b8-f530-4722-aacb-2d39fcf03ee2-utilities\") pod \"community-operators-l7wkb\" (UID: \"f60fb0b8-f530-4722-aacb-2d39fcf03ee2\") " pod="openshift-marketplace/community-operators-l7wkb" Dec 08 21:23:30 crc kubenswrapper[4791]: I1208 21:23:30.491150 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f60fb0b8-f530-4722-aacb-2d39fcf03ee2-catalog-content\") pod \"community-operators-l7wkb\" (UID: \"f60fb0b8-f530-4722-aacb-2d39fcf03ee2\") " pod="openshift-marketplace/community-operators-l7wkb" Dec 08 21:23:30 crc kubenswrapper[4791]: I1208 21:23:30.491800 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f60fb0b8-f530-4722-aacb-2d39fcf03ee2-catalog-content\") pod \"community-operators-l7wkb\" (UID: \"f60fb0b8-f530-4722-aacb-2d39fcf03ee2\") " pod="openshift-marketplace/community-operators-l7wkb" Dec 08 21:23:30 crc kubenswrapper[4791]: I1208 21:23:30.492275 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f60fb0b8-f530-4722-aacb-2d39fcf03ee2-utilities\") pod \"community-operators-l7wkb\" (UID: \"f60fb0b8-f530-4722-aacb-2d39fcf03ee2\") " pod="openshift-marketplace/community-operators-l7wkb" Dec 08 21:23:30 crc kubenswrapper[4791]: I1208 21:23:30.513738 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8x4gh\" (UniqueName: \"kubernetes.io/projected/f60fb0b8-f530-4722-aacb-2d39fcf03ee2-kube-api-access-8x4gh\") pod \"community-operators-l7wkb\" (UID: \"f60fb0b8-f530-4722-aacb-2d39fcf03ee2\") " pod="openshift-marketplace/community-operators-l7wkb" Dec 08 21:23:30 crc kubenswrapper[4791]: I1208 21:23:30.531437 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-l7wkb" Dec 08 21:23:30 crc kubenswrapper[4791]: I1208 21:23:30.806635 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-sv4g6"] Dec 08 21:23:30 crc kubenswrapper[4791]: I1208 21:23:30.926099 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sqxkn" event={"ID":"441a316f-5101-41ba-ac80-0065189657da","Type":"ContainerStarted","Data":"607b9a609bed418130f58710f9d1de6929deb4bbe15f045643666794e0bb9f51"} Dec 08 21:23:30 crc kubenswrapper[4791]: I1208 21:23:30.928561 4791 generic.go:334] "Generic (PLEG): container finished" podID="5f13498f-b751-484e-bfd1-8ea09222f482" containerID="13308b864af5eaddab524110c185d0f7e3e03872bd6166197438c0f467042a18" exitCode=0 Dec 08 21:23:30 crc kubenswrapper[4791]: I1208 21:23:30.928833 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hv7kc" event={"ID":"5f13498f-b751-484e-bfd1-8ea09222f482","Type":"ContainerDied","Data":"13308b864af5eaddab524110c185d0f7e3e03872bd6166197438c0f467042a18"} Dec 08 21:23:30 crc kubenswrapper[4791]: I1208 21:23:30.931761 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sv4g6" event={"ID":"bff6eb2e-73f5-4c5c-8785-dca5aebf0619","Type":"ContainerStarted","Data":"efee163659e83ad5ef2a9eddd1c31973c42aadec3a6a54bcab0f0d84305336b4"} Dec 08 21:23:30 crc kubenswrapper[4791]: I1208 21:23:30.973904 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-sqxkn" podStartSLOduration=2.552728613 podStartE2EDuration="3.97387643s" podCreationTimestamp="2025-12-08 21:23:27 +0000 UTC" firstStartedPulling="2025-12-08 21:23:28.903898792 +0000 UTC m=+285.602657137" lastFinishedPulling="2025-12-08 21:23:30.325046609 +0000 UTC m=+287.023804954" observedRunningTime="2025-12-08 21:23:30.960058932 +0000 UTC m=+287.658817277" watchObservedRunningTime="2025-12-08 21:23:30.97387643 +0000 UTC m=+287.672634775" Dec 08 21:23:30 crc kubenswrapper[4791]: I1208 21:23:30.977957 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-l7wkb"] Dec 08 21:23:31 crc kubenswrapper[4791]: I1208 21:23:31.939373 4791 generic.go:334] "Generic (PLEG): container finished" podID="bff6eb2e-73f5-4c5c-8785-dca5aebf0619" containerID="0fd976ec95eb972d6a82d15eb6deda35563e7e21b774bdec0eebf62b4ab10c7d" exitCode=0 Dec 08 21:23:31 crc kubenswrapper[4791]: I1208 21:23:31.939522 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sv4g6" event={"ID":"bff6eb2e-73f5-4c5c-8785-dca5aebf0619","Type":"ContainerDied","Data":"0fd976ec95eb972d6a82d15eb6deda35563e7e21b774bdec0eebf62b4ab10c7d"} Dec 08 21:23:31 crc kubenswrapper[4791]: I1208 21:23:31.944110 4791 generic.go:334] "Generic (PLEG): container finished" podID="f60fb0b8-f530-4722-aacb-2d39fcf03ee2" containerID="5f2aa2d53cbf92edad019f05da239a9fe736ea0bdcfd594262472d606243097d" exitCode=0 Dec 08 21:23:31 crc kubenswrapper[4791]: I1208 21:23:31.944235 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l7wkb" event={"ID":"f60fb0b8-f530-4722-aacb-2d39fcf03ee2","Type":"ContainerDied","Data":"5f2aa2d53cbf92edad019f05da239a9fe736ea0bdcfd594262472d606243097d"} Dec 08 21:23:31 crc kubenswrapper[4791]: I1208 21:23:31.944359 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-l7wkb" event={"ID":"f60fb0b8-f530-4722-aacb-2d39fcf03ee2","Type":"ContainerStarted","Data":"76673b62b5f213d029f0d39fc93a489463eba797c48b1ca4cb5641b082c7afe2"} Dec 08 21:23:31 crc kubenswrapper[4791]: I1208 21:23:31.952306 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hv7kc" event={"ID":"5f13498f-b751-484e-bfd1-8ea09222f482","Type":"ContainerStarted","Data":"8f536c3aab16a1971f93f7d70f982a5751006a8bb30e7df4919931e23bd4191e"} Dec 08 21:23:32 crc kubenswrapper[4791]: I1208 21:23:32.024076 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hv7kc" podStartSLOduration=2.5899833450000003 podStartE2EDuration="5.024045919s" podCreationTimestamp="2025-12-08 21:23:27 +0000 UTC" firstStartedPulling="2025-12-08 21:23:28.911103733 +0000 UTC m=+285.609862088" lastFinishedPulling="2025-12-08 21:23:31.345166317 +0000 UTC m=+288.043924662" observedRunningTime="2025-12-08 21:23:32.02366758 +0000 UTC m=+288.722425925" watchObservedRunningTime="2025-12-08 21:23:32.024045919 +0000 UTC m=+288.722804274" Dec 08 21:23:32 crc kubenswrapper[4791]: I1208 21:23:32.961446 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l7wkb" event={"ID":"f60fb0b8-f530-4722-aacb-2d39fcf03ee2","Type":"ContainerStarted","Data":"38e6be5d27556f7237167805ff696db126c91666ee888a16ce28e4147445b680"} Dec 08 21:23:32 crc kubenswrapper[4791]: I1208 21:23:32.965440 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sv4g6" event={"ID":"bff6eb2e-73f5-4c5c-8785-dca5aebf0619","Type":"ContainerStarted","Data":"44b02f9e415eaca12b17b10f2343e7aec67b80fad3a3319ba949a92a0fd0865b"} Dec 08 21:23:33 crc kubenswrapper[4791]: I1208 21:23:33.971415 4791 generic.go:334] "Generic (PLEG): container finished" podID="f60fb0b8-f530-4722-aacb-2d39fcf03ee2" containerID="38e6be5d27556f7237167805ff696db126c91666ee888a16ce28e4147445b680" exitCode=0 Dec 08 21:23:33 crc kubenswrapper[4791]: I1208 21:23:33.971539 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l7wkb" event={"ID":"f60fb0b8-f530-4722-aacb-2d39fcf03ee2","Type":"ContainerDied","Data":"38e6be5d27556f7237167805ff696db126c91666ee888a16ce28e4147445b680"} Dec 08 21:23:33 crc kubenswrapper[4791]: I1208 21:23:33.974203 4791 generic.go:334] "Generic (PLEG): container finished" podID="bff6eb2e-73f5-4c5c-8785-dca5aebf0619" containerID="44b02f9e415eaca12b17b10f2343e7aec67b80fad3a3319ba949a92a0fd0865b" exitCode=0 Dec 08 21:23:33 crc kubenswrapper[4791]: I1208 21:23:33.974263 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sv4g6" event={"ID":"bff6eb2e-73f5-4c5c-8785-dca5aebf0619","Type":"ContainerDied","Data":"44b02f9e415eaca12b17b10f2343e7aec67b80fad3a3319ba949a92a0fd0865b"} Dec 08 21:23:34 crc kubenswrapper[4791]: I1208 21:23:34.989133 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l7wkb" event={"ID":"f60fb0b8-f530-4722-aacb-2d39fcf03ee2","Type":"ContainerStarted","Data":"c7c04d4165f597350a9855ed580c371638eda0bba1a6baad61f290ca7ac97fa0"} Dec 08 21:23:34 crc kubenswrapper[4791]: I1208 21:23:34.993329 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sv4g6" 
event={"ID":"bff6eb2e-73f5-4c5c-8785-dca5aebf0619","Type":"ContainerStarted","Data":"095c4c0b9c9231b9bcfe552cbe4b35a0305bb4c7126fb4f2502626feb28492ca"} Dec 08 21:23:35 crc kubenswrapper[4791]: I1208 21:23:35.033359 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-sv4g6" podStartSLOduration=3.606920378 podStartE2EDuration="6.033339291s" podCreationTimestamp="2025-12-08 21:23:29 +0000 UTC" firstStartedPulling="2025-12-08 21:23:31.941154369 +0000 UTC m=+288.639912714" lastFinishedPulling="2025-12-08 21:23:34.367573282 +0000 UTC m=+291.066331627" observedRunningTime="2025-12-08 21:23:35.032722308 +0000 UTC m=+291.731480663" watchObservedRunningTime="2025-12-08 21:23:35.033339291 +0000 UTC m=+291.732097636" Dec 08 21:23:35 crc kubenswrapper[4791]: I1208 21:23:35.037111 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-l7wkb" podStartSLOduration=2.596227959 podStartE2EDuration="5.037098825s" podCreationTimestamp="2025-12-08 21:23:30 +0000 UTC" firstStartedPulling="2025-12-08 21:23:31.948565684 +0000 UTC m=+288.647324029" lastFinishedPulling="2025-12-08 21:23:34.38943655 +0000 UTC m=+291.088194895" observedRunningTime="2025-12-08 21:23:35.012350523 +0000 UTC m=+291.711108868" watchObservedRunningTime="2025-12-08 21:23:35.037098825 +0000 UTC m=+291.735857170" Dec 08 21:23:37 crc kubenswrapper[4791]: I1208 21:23:37.932025 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-sqxkn" Dec 08 21:23:37 crc kubenswrapper[4791]: I1208 21:23:37.932579 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-sqxkn" Dec 08 21:23:37 crc kubenswrapper[4791]: I1208 21:23:37.998855 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-sqxkn" Dec 08 21:23:38 crc kubenswrapper[4791]: I1208 21:23:38.055286 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-sqxkn" Dec 08 21:23:38 crc kubenswrapper[4791]: I1208 21:23:38.139896 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-hv7kc" Dec 08 21:23:38 crc kubenswrapper[4791]: I1208 21:23:38.139991 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-hv7kc" Dec 08 21:23:38 crc kubenswrapper[4791]: I1208 21:23:38.182197 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-hv7kc" Dec 08 21:23:39 crc kubenswrapper[4791]: I1208 21:23:39.066549 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hv7kc" Dec 08 21:23:40 crc kubenswrapper[4791]: I1208 21:23:40.396532 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-sv4g6" Dec 08 21:23:40 crc kubenswrapper[4791]: I1208 21:23:40.396827 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-sv4g6" Dec 08 21:23:40 crc kubenswrapper[4791]: I1208 21:23:40.434811 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-sv4g6" Dec 08 21:23:40 crc kubenswrapper[4791]: I1208 21:23:40.532182 
4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-l7wkb" Dec 08 21:23:40 crc kubenswrapper[4791]: I1208 21:23:40.532239 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-l7wkb" Dec 08 21:23:40 crc kubenswrapper[4791]: I1208 21:23:40.566640 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-l7wkb" Dec 08 21:23:41 crc kubenswrapper[4791]: I1208 21:23:41.075853 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-sv4g6" Dec 08 21:23:41 crc kubenswrapper[4791]: I1208 21:23:41.079345 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-l7wkb" Dec 08 21:23:43 crc kubenswrapper[4791]: I1208 21:23:43.451126 4791 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Dec 08 21:23:56 crc kubenswrapper[4791]: I1208 21:23:56.341304 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-monitoring/cluster-monitoring-operator-6d5b84845-vj8ws"] Dec 08 21:23:56 crc kubenswrapper[4791]: I1208 21:23:56.342849 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-vj8ws" Dec 08 21:23:56 crc kubenswrapper[4791]: I1208 21:23:56.346578 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"cluster-monitoring-operator-dockercfg-wwt9l" Dec 08 21:23:56 crc kubenswrapper[4791]: I1208 21:23:56.350309 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-monitoring"/"kube-root-ca.crt" Dec 08 21:23:56 crc kubenswrapper[4791]: I1208 21:23:56.350413 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-monitoring"/"openshift-service-ca.crt" Dec 08 21:23:56 crc kubenswrapper[4791]: I1208 21:23:56.350333 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"cluster-monitoring-operator-tls" Dec 08 21:23:56 crc kubenswrapper[4791]: I1208 21:23:56.350664 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-monitoring"/"telemetry-config" Dec 08 21:23:56 crc kubenswrapper[4791]: I1208 21:23:56.361088 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/cluster-monitoring-operator-6d5b84845-vj8ws"] Dec 08 21:23:56 crc kubenswrapper[4791]: I1208 21:23:56.467246 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cluster-monitoring-operator-tls\" (UniqueName: \"kubernetes.io/secret/dace1897-6a81-47f4-b5c3-8f68cabdffda-cluster-monitoring-operator-tls\") pod \"cluster-monitoring-operator-6d5b84845-vj8ws\" (UID: \"dace1897-6a81-47f4-b5c3-8f68cabdffda\") " pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-vj8ws" Dec 08 21:23:56 crc kubenswrapper[4791]: I1208 21:23:56.467319 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-config\" (UniqueName: \"kubernetes.io/configmap/dace1897-6a81-47f4-b5c3-8f68cabdffda-telemetry-config\") pod \"cluster-monitoring-operator-6d5b84845-vj8ws\" (UID: \"dace1897-6a81-47f4-b5c3-8f68cabdffda\") " pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-vj8ws" Dec 08 21:23:56 crc kubenswrapper[4791]: 
I1208 21:23:56.467369 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2skh9\" (UniqueName: \"kubernetes.io/projected/dace1897-6a81-47f4-b5c3-8f68cabdffda-kube-api-access-2skh9\") pod \"cluster-monitoring-operator-6d5b84845-vj8ws\" (UID: \"dace1897-6a81-47f4-b5c3-8f68cabdffda\") " pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-vj8ws" Dec 08 21:23:56 crc kubenswrapper[4791]: I1208 21:23:56.569162 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cluster-monitoring-operator-tls\" (UniqueName: \"kubernetes.io/secret/dace1897-6a81-47f4-b5c3-8f68cabdffda-cluster-monitoring-operator-tls\") pod \"cluster-monitoring-operator-6d5b84845-vj8ws\" (UID: \"dace1897-6a81-47f4-b5c3-8f68cabdffda\") " pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-vj8ws" Dec 08 21:23:56 crc kubenswrapper[4791]: I1208 21:23:56.569264 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-config\" (UniqueName: \"kubernetes.io/configmap/dace1897-6a81-47f4-b5c3-8f68cabdffda-telemetry-config\") pod \"cluster-monitoring-operator-6d5b84845-vj8ws\" (UID: \"dace1897-6a81-47f4-b5c3-8f68cabdffda\") " pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-vj8ws" Dec 08 21:23:56 crc kubenswrapper[4791]: I1208 21:23:56.569341 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2skh9\" (UniqueName: \"kubernetes.io/projected/dace1897-6a81-47f4-b5c3-8f68cabdffda-kube-api-access-2skh9\") pod \"cluster-monitoring-operator-6d5b84845-vj8ws\" (UID: \"dace1897-6a81-47f4-b5c3-8f68cabdffda\") " pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-vj8ws" Dec 08 21:23:56 crc kubenswrapper[4791]: I1208 21:23:56.570824 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-config\" (UniqueName: \"kubernetes.io/configmap/dace1897-6a81-47f4-b5c3-8f68cabdffda-telemetry-config\") pod \"cluster-monitoring-operator-6d5b84845-vj8ws\" (UID: \"dace1897-6a81-47f4-b5c3-8f68cabdffda\") " pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-vj8ws" Dec 08 21:23:56 crc kubenswrapper[4791]: I1208 21:23:56.578823 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cluster-monitoring-operator-tls\" (UniqueName: \"kubernetes.io/secret/dace1897-6a81-47f4-b5c3-8f68cabdffda-cluster-monitoring-operator-tls\") pod \"cluster-monitoring-operator-6d5b84845-vj8ws\" (UID: \"dace1897-6a81-47f4-b5c3-8f68cabdffda\") " pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-vj8ws" Dec 08 21:23:56 crc kubenswrapper[4791]: I1208 21:23:56.591631 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2skh9\" (UniqueName: \"kubernetes.io/projected/dace1897-6a81-47f4-b5c3-8f68cabdffda-kube-api-access-2skh9\") pod \"cluster-monitoring-operator-6d5b84845-vj8ws\" (UID: \"dace1897-6a81-47f4-b5c3-8f68cabdffda\") " pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-vj8ws" Dec 08 21:23:56 crc kubenswrapper[4791]: I1208 21:23:56.662255 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-vj8ws" Dec 08 21:23:57 crc kubenswrapper[4791]: I1208 21:23:57.068086 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/cluster-monitoring-operator-6d5b84845-vj8ws"] Dec 08 21:23:57 crc kubenswrapper[4791]: I1208 21:23:57.124866 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-vj8ws" event={"ID":"dace1897-6a81-47f4-b5c3-8f68cabdffda","Type":"ContainerStarted","Data":"a0b940a2fff91c8c1f5f15c197eb732a393487c2f25648c1e533fa628d1e0ba6"} Dec 08 21:23:59 crc kubenswrapper[4791]: I1208 21:23:59.137661 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-vj8ws" event={"ID":"dace1897-6a81-47f4-b5c3-8f68cabdffda","Type":"ContainerStarted","Data":"0afc57fbb3889c2aaf153350033fdcd9ee086a8923dda46ea135ccf63b197758"} Dec 08 21:23:59 crc kubenswrapper[4791]: I1208 21:23:59.152696 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-vj8ws" podStartSLOduration=1.3559741490000001 podStartE2EDuration="3.152673339s" podCreationTimestamp="2025-12-08 21:23:56 +0000 UTC" firstStartedPulling="2025-12-08 21:23:57.078980297 +0000 UTC m=+313.777738662" lastFinishedPulling="2025-12-08 21:23:58.875679507 +0000 UTC m=+315.574437852" observedRunningTime="2025-12-08 21:23:59.151630796 +0000 UTC m=+315.850389161" watchObservedRunningTime="2025-12-08 21:23:59.152673339 +0000 UTC m=+315.851431684" Dec 08 21:23:59 crc kubenswrapper[4791]: I1208 21:23:59.466996 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-z6x4h"] Dec 08 21:23:59 crc kubenswrapper[4791]: I1208 21:23:59.467784 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-z6x4h" Dec 08 21:23:59 crc kubenswrapper[4791]: I1208 21:23:59.469430 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-operator-admission-webhook-dockercfg-w975z" Dec 08 21:23:59 crc kubenswrapper[4791]: I1208 21:23:59.469935 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-operator-admission-webhook-tls" Dec 08 21:23:59 crc kubenswrapper[4791]: I1208 21:23:59.482428 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-z6x4h"] Dec 08 21:23:59 crc kubenswrapper[4791]: I1208 21:23:59.618166 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-certificates\" (UniqueName: \"kubernetes.io/secret/83070342-66ee-4da7-85b0-f09139e1a776-tls-certificates\") pod \"prometheus-operator-admission-webhook-f54c54754-z6x4h\" (UID: \"83070342-66ee-4da7-85b0-f09139e1a776\") " pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-z6x4h" Dec 08 21:23:59 crc kubenswrapper[4791]: I1208 21:23:59.720019 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-certificates\" (UniqueName: \"kubernetes.io/secret/83070342-66ee-4da7-85b0-f09139e1a776-tls-certificates\") pod \"prometheus-operator-admission-webhook-f54c54754-z6x4h\" (UID: \"83070342-66ee-4da7-85b0-f09139e1a776\") " pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-z6x4h" Dec 08 21:23:59 crc kubenswrapper[4791]: I1208 21:23:59.729808 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-certificates\" (UniqueName: \"kubernetes.io/secret/83070342-66ee-4da7-85b0-f09139e1a776-tls-certificates\") pod \"prometheus-operator-admission-webhook-f54c54754-z6x4h\" (UID: \"83070342-66ee-4da7-85b0-f09139e1a776\") " pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-z6x4h" Dec 08 21:23:59 crc kubenswrapper[4791]: I1208 21:23:59.789116 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-z6x4h" Dec 08 21:24:00 crc kubenswrapper[4791]: I1208 21:24:00.177469 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-z6x4h"] Dec 08 21:24:00 crc kubenswrapper[4791]: W1208 21:24:00.182825 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod83070342_66ee_4da7_85b0_f09139e1a776.slice/crio-7d10af58bbf62c61a3aaf73680dc77efa8b2fb880df75aa0b13169cadce2d025 WatchSource:0}: Error finding container 7d10af58bbf62c61a3aaf73680dc77efa8b2fb880df75aa0b13169cadce2d025: Status 404 returned error can't find the container with id 7d10af58bbf62c61a3aaf73680dc77efa8b2fb880df75aa0b13169cadce2d025 Dec 08 21:24:01 crc kubenswrapper[4791]: I1208 21:24:01.154960 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-z6x4h" event={"ID":"83070342-66ee-4da7-85b0-f09139e1a776","Type":"ContainerStarted","Data":"7d10af58bbf62c61a3aaf73680dc77efa8b2fb880df75aa0b13169cadce2d025"} Dec 08 21:24:02 crc kubenswrapper[4791]: I1208 21:24:02.162360 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-z6x4h" event={"ID":"83070342-66ee-4da7-85b0-f09139e1a776","Type":"ContainerStarted","Data":"bc6d815bf7e9c64f099aceb6f307774d5a59d620fb8770848490598a1b88ddde"} Dec 08 21:24:02 crc kubenswrapper[4791]: I1208 21:24:02.164181 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-z6x4h" Dec 08 21:24:02 crc kubenswrapper[4791]: I1208 21:24:02.168539 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-z6x4h" Dec 08 21:24:02 crc kubenswrapper[4791]: I1208 21:24:02.177677 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-z6x4h" podStartSLOduration=1.381495975 podStartE2EDuration="3.177655233s" podCreationTimestamp="2025-12-08 21:23:59 +0000 UTC" firstStartedPulling="2025-12-08 21:24:00.185308246 +0000 UTC m=+316.884066591" lastFinishedPulling="2025-12-08 21:24:01.981467504 +0000 UTC m=+318.680225849" observedRunningTime="2025-12-08 21:24:02.174511083 +0000 UTC m=+318.873269428" watchObservedRunningTime="2025-12-08 21:24:02.177655233 +0000 UTC m=+318.876413578" Dec 08 21:24:02 crc kubenswrapper[4791]: I1208 21:24:02.546724 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-monitoring/prometheus-operator-db54df47d-cnb8n"] Dec 08 21:24:02 crc kubenswrapper[4791]: I1208 21:24:02.547809 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-monitoring/prometheus-operator-db54df47d-cnb8n" Dec 08 21:24:02 crc kubenswrapper[4791]: I1208 21:24:02.551505 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-operator-tls" Dec 08 21:24:02 crc kubenswrapper[4791]: I1208 21:24:02.551543 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-monitoring"/"metrics-client-ca" Dec 08 21:24:02 crc kubenswrapper[4791]: I1208 21:24:02.551610 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-operator-dockercfg-bkbfz" Dec 08 21:24:02 crc kubenswrapper[4791]: I1208 21:24:02.552306 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-operator-kube-rbac-proxy-config" Dec 08 21:24:02 crc kubenswrapper[4791]: I1208 21:24:02.561226 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/prometheus-operator-db54df47d-cnb8n"] Dec 08 21:24:02 crc kubenswrapper[4791]: I1208 21:24:02.657047 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4r58w\" (UniqueName: \"kubernetes.io/projected/80c54a6b-f3f4-4d41-98cc-a7c79a1310a3-kube-api-access-4r58w\") pod \"prometheus-operator-db54df47d-cnb8n\" (UID: \"80c54a6b-f3f4-4d41-98cc-a7c79a1310a3\") " pod="openshift-monitoring/prometheus-operator-db54df47d-cnb8n" Dec 08 21:24:02 crc kubenswrapper[4791]: I1208 21:24:02.657475 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-operator-tls\" (UniqueName: \"kubernetes.io/secret/80c54a6b-f3f4-4d41-98cc-a7c79a1310a3-prometheus-operator-tls\") pod \"prometheus-operator-db54df47d-cnb8n\" (UID: \"80c54a6b-f3f4-4d41-98cc-a7c79a1310a3\") " pod="openshift-monitoring/prometheus-operator-db54df47d-cnb8n" Dec 08 21:24:02 crc kubenswrapper[4791]: I1208 21:24:02.657604 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-operator-kube-rbac-proxy-config\" (UniqueName: \"kubernetes.io/secret/80c54a6b-f3f4-4d41-98cc-a7c79a1310a3-prometheus-operator-kube-rbac-proxy-config\") pod \"prometheus-operator-db54df47d-cnb8n\" (UID: \"80c54a6b-f3f4-4d41-98cc-a7c79a1310a3\") " pod="openshift-monitoring/prometheus-operator-db54df47d-cnb8n" Dec 08 21:24:02 crc kubenswrapper[4791]: I1208 21:24:02.657786 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/80c54a6b-f3f4-4d41-98cc-a7c79a1310a3-metrics-client-ca\") pod \"prometheus-operator-db54df47d-cnb8n\" (UID: \"80c54a6b-f3f4-4d41-98cc-a7c79a1310a3\") " pod="openshift-monitoring/prometheus-operator-db54df47d-cnb8n" Dec 08 21:24:02 crc kubenswrapper[4791]: I1208 21:24:02.759241 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4r58w\" (UniqueName: \"kubernetes.io/projected/80c54a6b-f3f4-4d41-98cc-a7c79a1310a3-kube-api-access-4r58w\") pod \"prometheus-operator-db54df47d-cnb8n\" (UID: \"80c54a6b-f3f4-4d41-98cc-a7c79a1310a3\") " pod="openshift-monitoring/prometheus-operator-db54df47d-cnb8n" Dec 08 21:24:02 crc kubenswrapper[4791]: I1208 21:24:02.759578 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-operator-tls\" (UniqueName: \"kubernetes.io/secret/80c54a6b-f3f4-4d41-98cc-a7c79a1310a3-prometheus-operator-tls\") pod 
\"prometheus-operator-db54df47d-cnb8n\" (UID: \"80c54a6b-f3f4-4d41-98cc-a7c79a1310a3\") " pod="openshift-monitoring/prometheus-operator-db54df47d-cnb8n" Dec 08 21:24:02 crc kubenswrapper[4791]: I1208 21:24:02.759679 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-operator-kube-rbac-proxy-config\" (UniqueName: \"kubernetes.io/secret/80c54a6b-f3f4-4d41-98cc-a7c79a1310a3-prometheus-operator-kube-rbac-proxy-config\") pod \"prometheus-operator-db54df47d-cnb8n\" (UID: \"80c54a6b-f3f4-4d41-98cc-a7c79a1310a3\") " pod="openshift-monitoring/prometheus-operator-db54df47d-cnb8n" Dec 08 21:24:02 crc kubenswrapper[4791]: I1208 21:24:02.759792 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/80c54a6b-f3f4-4d41-98cc-a7c79a1310a3-metrics-client-ca\") pod \"prometheus-operator-db54df47d-cnb8n\" (UID: \"80c54a6b-f3f4-4d41-98cc-a7c79a1310a3\") " pod="openshift-monitoring/prometheus-operator-db54df47d-cnb8n" Dec 08 21:24:02 crc kubenswrapper[4791]: I1208 21:24:02.760832 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/80c54a6b-f3f4-4d41-98cc-a7c79a1310a3-metrics-client-ca\") pod \"prometheus-operator-db54df47d-cnb8n\" (UID: \"80c54a6b-f3f4-4d41-98cc-a7c79a1310a3\") " pod="openshift-monitoring/prometheus-operator-db54df47d-cnb8n" Dec 08 21:24:02 crc kubenswrapper[4791]: I1208 21:24:02.841630 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-operator-kube-rbac-proxy-config\" (UniqueName: \"kubernetes.io/secret/80c54a6b-f3f4-4d41-98cc-a7c79a1310a3-prometheus-operator-kube-rbac-proxy-config\") pod \"prometheus-operator-db54df47d-cnb8n\" (UID: \"80c54a6b-f3f4-4d41-98cc-a7c79a1310a3\") " pod="openshift-monitoring/prometheus-operator-db54df47d-cnb8n" Dec 08 21:24:02 crc kubenswrapper[4791]: I1208 21:24:02.841763 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-operator-tls\" (UniqueName: \"kubernetes.io/secret/80c54a6b-f3f4-4d41-98cc-a7c79a1310a3-prometheus-operator-tls\") pod \"prometheus-operator-db54df47d-cnb8n\" (UID: \"80c54a6b-f3f4-4d41-98cc-a7c79a1310a3\") " pod="openshift-monitoring/prometheus-operator-db54df47d-cnb8n" Dec 08 21:24:02 crc kubenswrapper[4791]: I1208 21:24:02.842324 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4r58w\" (UniqueName: \"kubernetes.io/projected/80c54a6b-f3f4-4d41-98cc-a7c79a1310a3-kube-api-access-4r58w\") pod \"prometheus-operator-db54df47d-cnb8n\" (UID: \"80c54a6b-f3f4-4d41-98cc-a7c79a1310a3\") " pod="openshift-monitoring/prometheus-operator-db54df47d-cnb8n" Dec 08 21:24:02 crc kubenswrapper[4791]: I1208 21:24:02.863850 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-monitoring/prometheus-operator-db54df47d-cnb8n" Dec 08 21:24:03 crc kubenswrapper[4791]: I1208 21:24:03.254456 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/prometheus-operator-db54df47d-cnb8n"] Dec 08 21:24:03 crc kubenswrapper[4791]: W1208 21:24:03.258865 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod80c54a6b_f3f4_4d41_98cc_a7c79a1310a3.slice/crio-d660e9b301d93be8510b4a3663c6df7511907c5b7fea8a797c41af10eca19a91 WatchSource:0}: Error finding container d660e9b301d93be8510b4a3663c6df7511907c5b7fea8a797c41af10eca19a91: Status 404 returned error can't find the container with id d660e9b301d93be8510b4a3663c6df7511907c5b7fea8a797c41af10eca19a91 Dec 08 21:24:04 crc kubenswrapper[4791]: I1208 21:24:04.174755 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-operator-db54df47d-cnb8n" event={"ID":"80c54a6b-f3f4-4d41-98cc-a7c79a1310a3","Type":"ContainerStarted","Data":"d660e9b301d93be8510b4a3663c6df7511907c5b7fea8a797c41af10eca19a91"} Dec 08 21:24:05 crc kubenswrapper[4791]: I1208 21:24:05.181252 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-operator-db54df47d-cnb8n" event={"ID":"80c54a6b-f3f4-4d41-98cc-a7c79a1310a3","Type":"ContainerStarted","Data":"5ae0167e662dee66b0351ebb3755ab1eee1b2fceffa62b8becc9afec992e1850"} Dec 08 21:24:06 crc kubenswrapper[4791]: I1208 21:24:06.188811 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-operator-db54df47d-cnb8n" event={"ID":"80c54a6b-f3f4-4d41-98cc-a7c79a1310a3","Type":"ContainerStarted","Data":"171b99753c1c4715788d9ca28e7d48334160fae0447bbf99d6f46b7b004b6041"} Dec 08 21:24:06 crc kubenswrapper[4791]: I1208 21:24:06.211146 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-monitoring/prometheus-operator-db54df47d-cnb8n" podStartSLOduration=2.53165114 podStartE2EDuration="4.211127944s" podCreationTimestamp="2025-12-08 21:24:02 +0000 UTC" firstStartedPulling="2025-12-08 21:24:03.261259247 +0000 UTC m=+319.960017592" lastFinishedPulling="2025-12-08 21:24:04.940736061 +0000 UTC m=+321.639494396" observedRunningTime="2025-12-08 21:24:06.208077176 +0000 UTC m=+322.906835531" watchObservedRunningTime="2025-12-08 21:24:06.211127944 +0000 UTC m=+322.909886279" Dec 08 21:24:07 crc kubenswrapper[4791]: I1208 21:24:07.874648 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-monitoring/openshift-state-metrics-566fddb674-7vrw4"] Dec 08 21:24:07 crc kubenswrapper[4791]: I1208 21:24:07.876057 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-monitoring/openshift-state-metrics-566fddb674-7vrw4" Dec 08 21:24:07 crc kubenswrapper[4791]: I1208 21:24:07.879881 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"openshift-state-metrics-kube-rbac-proxy-config" Dec 08 21:24:07 crc kubenswrapper[4791]: I1208 21:24:07.880177 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"openshift-state-metrics-dockercfg-ns72w" Dec 08 21:24:07 crc kubenswrapper[4791]: I1208 21:24:07.880400 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"openshift-state-metrics-tls" Dec 08 21:24:07 crc kubenswrapper[4791]: I1208 21:24:07.892514 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/openshift-state-metrics-566fddb674-7vrw4"] Dec 08 21:24:07 crc kubenswrapper[4791]: I1208 21:24:07.931358 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-monitoring/kube-state-metrics-777cb5bd5d-j79wk"] Dec 08 21:24:07 crc kubenswrapper[4791]: I1208 21:24:07.933682 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-j79wk" Dec 08 21:24:07 crc kubenswrapper[4791]: I1208 21:24:07.963371 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-monitoring/node-exporter-29ngm"] Dec 08 21:24:07 crc kubenswrapper[4791]: I1208 21:24:07.965436 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-monitoring/node-exporter-29ngm" Dec 08 21:24:07 crc kubenswrapper[4791]: I1208 21:24:07.966663 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"kube-state-metrics-kube-rbac-proxy-config" Dec 08 21:24:07 crc kubenswrapper[4791]: I1208 21:24:07.966885 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"kube-state-metrics-tls" Dec 08 21:24:07 crc kubenswrapper[4791]: I1208 21:24:07.966996 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-monitoring"/"kube-state-metrics-custom-resource-state-configmap" Dec 08 21:24:07 crc kubenswrapper[4791]: I1208 21:24:07.971934 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/kube-state-metrics-777cb5bd5d-j79wk"] Dec 08 21:24:07 crc kubenswrapper[4791]: I1208 21:24:07.975205 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"kube-state-metrics-dockercfg-f5gt9" Dec 08 21:24:07 crc kubenswrapper[4791]: I1208 21:24:07.981828 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"node-exporter-tls" Dec 08 21:24:07 crc kubenswrapper[4791]: I1208 21:24:07.981845 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"node-exporter-dockercfg-mfp49" Dec 08 21:24:07 crc kubenswrapper[4791]: I1208 21:24:07.981873 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"node-exporter-kube-rbac-proxy-config" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.036636 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/048170b6-ffe5-402f-995d-431b35f154fc-metrics-client-ca\") pod \"openshift-state-metrics-566fddb674-7vrw4\" (UID: \"048170b6-ffe5-402f-995d-431b35f154fc\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-7vrw4" Dec 08 
21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.036752 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-state-metrics-kube-rbac-proxy-config\" (UniqueName: \"kubernetes.io/secret/048170b6-ffe5-402f-995d-431b35f154fc-openshift-state-metrics-kube-rbac-proxy-config\") pod \"openshift-state-metrics-566fddb674-7vrw4\" (UID: \"048170b6-ffe5-402f-995d-431b35f154fc\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-7vrw4" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.036802 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-state-metrics-tls\" (UniqueName: \"kubernetes.io/secret/048170b6-ffe5-402f-995d-431b35f154fc-openshift-state-metrics-tls\") pod \"openshift-state-metrics-566fddb674-7vrw4\" (UID: \"048170b6-ffe5-402f-995d-431b35f154fc\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-7vrw4" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.036848 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wqdz8\" (UniqueName: \"kubernetes.io/projected/048170b6-ffe5-402f-995d-431b35f154fc-kube-api-access-wqdz8\") pod \"openshift-state-metrics-566fddb674-7vrw4\" (UID: \"048170b6-ffe5-402f-995d-431b35f154fc\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-7vrw4" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.154651 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"root\" (UniqueName: \"kubernetes.io/host-path/d6d9a3d8-295b-4b2e-a789-5a61884835a4-root\") pod \"node-exporter-29ngm\" (UID: \"d6d9a3d8-295b-4b2e-a789-5a61884835a4\") " pod="openshift-monitoring/node-exporter-29ngm" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.154738 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/048170b6-ffe5-402f-995d-431b35f154fc-metrics-client-ca\") pod \"openshift-state-metrics-566fddb674-7vrw4\" (UID: \"048170b6-ffe5-402f-995d-431b35f154fc\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-7vrw4" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.154776 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gshkf\" (UniqueName: \"kubernetes.io/projected/a0252ac3-5f5c-4d97-835b-50f74c280cc9-kube-api-access-gshkf\") pod \"kube-state-metrics-777cb5bd5d-j79wk\" (UID: \"a0252ac3-5f5c-4d97-835b-50f74c280cc9\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-j79wk" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.154817 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-exporter-tls\" (UniqueName: \"kubernetes.io/secret/d6d9a3d8-295b-4b2e-a789-5a61884835a4-node-exporter-tls\") pod \"node-exporter-29ngm\" (UID: \"d6d9a3d8-295b-4b2e-a789-5a61884835a4\") " pod="openshift-monitoring/node-exporter-29ngm" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.154850 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/d6d9a3d8-295b-4b2e-a789-5a61884835a4-sys\") pod \"node-exporter-29ngm\" (UID: \"d6d9a3d8-295b-4b2e-a789-5a61884835a4\") " pod="openshift-monitoring/node-exporter-29ngm" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.154887 4791 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"node-exporter-kube-rbac-proxy-config\" (UniqueName: \"kubernetes.io/secret/d6d9a3d8-295b-4b2e-a789-5a61884835a4-node-exporter-kube-rbac-proxy-config\") pod \"node-exporter-29ngm\" (UID: \"d6d9a3d8-295b-4b2e-a789-5a61884835a4\") " pod="openshift-monitoring/node-exporter-29ngm" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.154921 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-state-metrics-kube-rbac-proxy-config\" (UniqueName: \"kubernetes.io/secret/048170b6-ffe5-402f-995d-431b35f154fc-openshift-state-metrics-kube-rbac-proxy-config\") pod \"openshift-state-metrics-566fddb674-7vrw4\" (UID: \"048170b6-ffe5-402f-995d-431b35f154fc\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-7vrw4" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.154954 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-exporter-textfile\" (UniqueName: \"kubernetes.io/empty-dir/d6d9a3d8-295b-4b2e-a789-5a61884835a4-node-exporter-textfile\") pod \"node-exporter-29ngm\" (UID: \"d6d9a3d8-295b-4b2e-a789-5a61884835a4\") " pod="openshift-monitoring/node-exporter-29ngm" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.154987 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls\" (UniqueName: \"kubernetes.io/secret/a0252ac3-5f5c-4d97-835b-50f74c280cc9-kube-state-metrics-tls\") pod \"kube-state-metrics-777cb5bd5d-j79wk\" (UID: \"a0252ac3-5f5c-4d97-835b-50f74c280cc9\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-j79wk" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.155017 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/a0252ac3-5f5c-4d97-835b-50f74c280cc9-metrics-client-ca\") pod \"kube-state-metrics-777cb5bd5d-j79wk\" (UID: \"a0252ac3-5f5c-4d97-835b-50f74c280cc9\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-j79wk" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.155041 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-exporter-wtmp\" (UniqueName: \"kubernetes.io/host-path/d6d9a3d8-295b-4b2e-a789-5a61884835a4-node-exporter-wtmp\") pod \"node-exporter-29ngm\" (UID: \"d6d9a3d8-295b-4b2e-a789-5a61884835a4\") " pod="openshift-monitoring/node-exporter-29ngm" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.155076 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zgvbz\" (UniqueName: \"kubernetes.io/projected/d6d9a3d8-295b-4b2e-a789-5a61884835a4-kube-api-access-zgvbz\") pod \"node-exporter-29ngm\" (UID: \"d6d9a3d8-295b-4b2e-a789-5a61884835a4\") " pod="openshift-monitoring/node-exporter-29ngm" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.155104 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-state-metrics-tls\" (UniqueName: \"kubernetes.io/secret/048170b6-ffe5-402f-995d-431b35f154fc-openshift-state-metrics-tls\") pod \"openshift-state-metrics-566fddb674-7vrw4\" (UID: \"048170b6-ffe5-402f-995d-431b35f154fc\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-7vrw4" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.155129 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"volume-directive-shadow\" (UniqueName: \"kubernetes.io/empty-dir/a0252ac3-5f5c-4d97-835b-50f74c280cc9-volume-directive-shadow\") pod \"kube-state-metrics-777cb5bd5d-j79wk\" (UID: \"a0252ac3-5f5c-4d97-835b-50f74c280cc9\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-j79wk" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.155156 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-custom-resource-state-configmap\" (UniqueName: \"kubernetes.io/configmap/a0252ac3-5f5c-4d97-835b-50f74c280cc9-kube-state-metrics-custom-resource-state-configmap\") pod \"kube-state-metrics-777cb5bd5d-j79wk\" (UID: \"a0252ac3-5f5c-4d97-835b-50f74c280cc9\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-j79wk" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.155186 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/d6d9a3d8-295b-4b2e-a789-5a61884835a4-metrics-client-ca\") pod \"node-exporter-29ngm\" (UID: \"d6d9a3d8-295b-4b2e-a789-5a61884835a4\") " pod="openshift-monitoring/node-exporter-29ngm" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.155226 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wqdz8\" (UniqueName: \"kubernetes.io/projected/048170b6-ffe5-402f-995d-431b35f154fc-kube-api-access-wqdz8\") pod \"openshift-state-metrics-566fddb674-7vrw4\" (UID: \"048170b6-ffe5-402f-995d-431b35f154fc\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-7vrw4" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.155255 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-kube-rbac-proxy-config\" (UniqueName: \"kubernetes.io/secret/a0252ac3-5f5c-4d97-835b-50f74c280cc9-kube-state-metrics-kube-rbac-proxy-config\") pod \"kube-state-metrics-777cb5bd5d-j79wk\" (UID: \"a0252ac3-5f5c-4d97-835b-50f74c280cc9\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-j79wk" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.156113 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/048170b6-ffe5-402f-995d-431b35f154fc-metrics-client-ca\") pod \"openshift-state-metrics-566fddb674-7vrw4\" (UID: \"048170b6-ffe5-402f-995d-431b35f154fc\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-7vrw4" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.166858 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-state-metrics-kube-rbac-proxy-config\" (UniqueName: \"kubernetes.io/secret/048170b6-ffe5-402f-995d-431b35f154fc-openshift-state-metrics-kube-rbac-proxy-config\") pod \"openshift-state-metrics-566fddb674-7vrw4\" (UID: \"048170b6-ffe5-402f-995d-431b35f154fc\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-7vrw4" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.173387 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-state-metrics-tls\" (UniqueName: \"kubernetes.io/secret/048170b6-ffe5-402f-995d-431b35f154fc-openshift-state-metrics-tls\") pod \"openshift-state-metrics-566fddb674-7vrw4\" (UID: \"048170b6-ffe5-402f-995d-431b35f154fc\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-7vrw4" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.179547 4791 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wqdz8\" (UniqueName: \"kubernetes.io/projected/048170b6-ffe5-402f-995d-431b35f154fc-kube-api-access-wqdz8\") pod \"openshift-state-metrics-566fddb674-7vrw4\" (UID: \"048170b6-ffe5-402f-995d-431b35f154fc\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-7vrw4" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.196604 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-monitoring/openshift-state-metrics-566fddb674-7vrw4" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.255666 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/d6d9a3d8-295b-4b2e-a789-5a61884835a4-sys\") pod \"node-exporter-29ngm\" (UID: \"d6d9a3d8-295b-4b2e-a789-5a61884835a4\") " pod="openshift-monitoring/node-exporter-29ngm" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.255727 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-exporter-kube-rbac-proxy-config\" (UniqueName: \"kubernetes.io/secret/d6d9a3d8-295b-4b2e-a789-5a61884835a4-node-exporter-kube-rbac-proxy-config\") pod \"node-exporter-29ngm\" (UID: \"d6d9a3d8-295b-4b2e-a789-5a61884835a4\") " pod="openshift-monitoring/node-exporter-29ngm" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.255753 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-exporter-textfile\" (UniqueName: \"kubernetes.io/empty-dir/d6d9a3d8-295b-4b2e-a789-5a61884835a4-node-exporter-textfile\") pod \"node-exporter-29ngm\" (UID: \"d6d9a3d8-295b-4b2e-a789-5a61884835a4\") " pod="openshift-monitoring/node-exporter-29ngm" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.255781 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls\" (UniqueName: \"kubernetes.io/secret/a0252ac3-5f5c-4d97-835b-50f74c280cc9-kube-state-metrics-tls\") pod \"kube-state-metrics-777cb5bd5d-j79wk\" (UID: \"a0252ac3-5f5c-4d97-835b-50f74c280cc9\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-j79wk" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.255812 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/a0252ac3-5f5c-4d97-835b-50f74c280cc9-metrics-client-ca\") pod \"kube-state-metrics-777cb5bd5d-j79wk\" (UID: \"a0252ac3-5f5c-4d97-835b-50f74c280cc9\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-j79wk" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.255828 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-exporter-wtmp\" (UniqueName: \"kubernetes.io/host-path/d6d9a3d8-295b-4b2e-a789-5a61884835a4-node-exporter-wtmp\") pod \"node-exporter-29ngm\" (UID: \"d6d9a3d8-295b-4b2e-a789-5a61884835a4\") " pod="openshift-monitoring/node-exporter-29ngm" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.255847 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zgvbz\" (UniqueName: \"kubernetes.io/projected/d6d9a3d8-295b-4b2e-a789-5a61884835a4-kube-api-access-zgvbz\") pod \"node-exporter-29ngm\" (UID: \"d6d9a3d8-295b-4b2e-a789-5a61884835a4\") " pod="openshift-monitoring/node-exporter-29ngm" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.255846 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: 
\"kubernetes.io/host-path/d6d9a3d8-295b-4b2e-a789-5a61884835a4-sys\") pod \"node-exporter-29ngm\" (UID: \"d6d9a3d8-295b-4b2e-a789-5a61884835a4\") " pod="openshift-monitoring/node-exporter-29ngm" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.255871 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"volume-directive-shadow\" (UniqueName: \"kubernetes.io/empty-dir/a0252ac3-5f5c-4d97-835b-50f74c280cc9-volume-directive-shadow\") pod \"kube-state-metrics-777cb5bd5d-j79wk\" (UID: \"a0252ac3-5f5c-4d97-835b-50f74c280cc9\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-j79wk" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.255893 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-custom-resource-state-configmap\" (UniqueName: \"kubernetes.io/configmap/a0252ac3-5f5c-4d97-835b-50f74c280cc9-kube-state-metrics-custom-resource-state-configmap\") pod \"kube-state-metrics-777cb5bd5d-j79wk\" (UID: \"a0252ac3-5f5c-4d97-835b-50f74c280cc9\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-j79wk" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.255922 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/d6d9a3d8-295b-4b2e-a789-5a61884835a4-metrics-client-ca\") pod \"node-exporter-29ngm\" (UID: \"d6d9a3d8-295b-4b2e-a789-5a61884835a4\") " pod="openshift-monitoring/node-exporter-29ngm" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.255962 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-kube-rbac-proxy-config\" (UniqueName: \"kubernetes.io/secret/a0252ac3-5f5c-4d97-835b-50f74c280cc9-kube-state-metrics-kube-rbac-proxy-config\") pod \"kube-state-metrics-777cb5bd5d-j79wk\" (UID: \"a0252ac3-5f5c-4d97-835b-50f74c280cc9\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-j79wk" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.255988 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"root\" (UniqueName: \"kubernetes.io/host-path/d6d9a3d8-295b-4b2e-a789-5a61884835a4-root\") pod \"node-exporter-29ngm\" (UID: \"d6d9a3d8-295b-4b2e-a789-5a61884835a4\") " pod="openshift-monitoring/node-exporter-29ngm" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.256012 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gshkf\" (UniqueName: \"kubernetes.io/projected/a0252ac3-5f5c-4d97-835b-50f74c280cc9-kube-api-access-gshkf\") pod \"kube-state-metrics-777cb5bd5d-j79wk\" (UID: \"a0252ac3-5f5c-4d97-835b-50f74c280cc9\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-j79wk" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.256048 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-exporter-tls\" (UniqueName: \"kubernetes.io/secret/d6d9a3d8-295b-4b2e-a789-5a61884835a4-node-exporter-tls\") pod \"node-exporter-29ngm\" (UID: \"d6d9a3d8-295b-4b2e-a789-5a61884835a4\") " pod="openshift-monitoring/node-exporter-29ngm" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.256045 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-exporter-wtmp\" (UniqueName: \"kubernetes.io/host-path/d6d9a3d8-295b-4b2e-a789-5a61884835a4-node-exporter-wtmp\") pod \"node-exporter-29ngm\" (UID: \"d6d9a3d8-295b-4b2e-a789-5a61884835a4\") " pod="openshift-monitoring/node-exporter-29ngm" Dec 08 21:24:08 crc 
kubenswrapper[4791]: I1208 21:24:08.256178 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"root\" (UniqueName: \"kubernetes.io/host-path/d6d9a3d8-295b-4b2e-a789-5a61884835a4-root\") pod \"node-exporter-29ngm\" (UID: \"d6d9a3d8-295b-4b2e-a789-5a61884835a4\") " pod="openshift-monitoring/node-exporter-29ngm" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.256495 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-exporter-textfile\" (UniqueName: \"kubernetes.io/empty-dir/d6d9a3d8-295b-4b2e-a789-5a61884835a4-node-exporter-textfile\") pod \"node-exporter-29ngm\" (UID: \"d6d9a3d8-295b-4b2e-a789-5a61884835a4\") " pod="openshift-monitoring/node-exporter-29ngm" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.256831 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"volume-directive-shadow\" (UniqueName: \"kubernetes.io/empty-dir/a0252ac3-5f5c-4d97-835b-50f74c280cc9-volume-directive-shadow\") pod \"kube-state-metrics-777cb5bd5d-j79wk\" (UID: \"a0252ac3-5f5c-4d97-835b-50f74c280cc9\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-j79wk" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.256883 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/a0252ac3-5f5c-4d97-835b-50f74c280cc9-metrics-client-ca\") pod \"kube-state-metrics-777cb5bd5d-j79wk\" (UID: \"a0252ac3-5f5c-4d97-835b-50f74c280cc9\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-j79wk" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.257047 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-custom-resource-state-configmap\" (UniqueName: \"kubernetes.io/configmap/a0252ac3-5f5c-4d97-835b-50f74c280cc9-kube-state-metrics-custom-resource-state-configmap\") pod \"kube-state-metrics-777cb5bd5d-j79wk\" (UID: \"a0252ac3-5f5c-4d97-835b-50f74c280cc9\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-j79wk" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.258181 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/d6d9a3d8-295b-4b2e-a789-5a61884835a4-metrics-client-ca\") pod \"node-exporter-29ngm\" (UID: \"d6d9a3d8-295b-4b2e-a789-5a61884835a4\") " pod="openshift-monitoring/node-exporter-29ngm" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.259351 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-exporter-kube-rbac-proxy-config\" (UniqueName: \"kubernetes.io/secret/d6d9a3d8-295b-4b2e-a789-5a61884835a4-node-exporter-kube-rbac-proxy-config\") pod \"node-exporter-29ngm\" (UID: \"d6d9a3d8-295b-4b2e-a789-5a61884835a4\") " pod="openshift-monitoring/node-exporter-29ngm" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.259805 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-kube-rbac-proxy-config\" (UniqueName: \"kubernetes.io/secret/a0252ac3-5f5c-4d97-835b-50f74c280cc9-kube-state-metrics-kube-rbac-proxy-config\") pod \"kube-state-metrics-777cb5bd5d-j79wk\" (UID: \"a0252ac3-5f5c-4d97-835b-50f74c280cc9\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-j79wk" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.262535 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-exporter-tls\" (UniqueName: \"kubernetes.io/secret/d6d9a3d8-295b-4b2e-a789-5a61884835a4-node-exporter-tls\") pod 
\"node-exporter-29ngm\" (UID: \"d6d9a3d8-295b-4b2e-a789-5a61884835a4\") " pod="openshift-monitoring/node-exporter-29ngm" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.264088 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls\" (UniqueName: \"kubernetes.io/secret/a0252ac3-5f5c-4d97-835b-50f74c280cc9-kube-state-metrics-tls\") pod \"kube-state-metrics-777cb5bd5d-j79wk\" (UID: \"a0252ac3-5f5c-4d97-835b-50f74c280cc9\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-j79wk" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.273956 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gshkf\" (UniqueName: \"kubernetes.io/projected/a0252ac3-5f5c-4d97-835b-50f74c280cc9-kube-api-access-gshkf\") pod \"kube-state-metrics-777cb5bd5d-j79wk\" (UID: \"a0252ac3-5f5c-4d97-835b-50f74c280cc9\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-j79wk" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.278344 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zgvbz\" (UniqueName: \"kubernetes.io/projected/d6d9a3d8-295b-4b2e-a789-5a61884835a4-kube-api-access-zgvbz\") pod \"node-exporter-29ngm\" (UID: \"d6d9a3d8-295b-4b2e-a789-5a61884835a4\") " pod="openshift-monitoring/node-exporter-29ngm" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.282903 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-j79wk" Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.298028 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-monitoring/node-exporter-29ngm" Dec 08 21:24:08 crc kubenswrapper[4791]: W1208 21:24:08.321989 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd6d9a3d8_295b_4b2e_a789_5a61884835a4.slice/crio-78f4356a47a23b9779d699a0bf342b754487fec9597cb35a29fa0a64f385dcd1 WatchSource:0}: Error finding container 78f4356a47a23b9779d699a0bf342b754487fec9597cb35a29fa0a64f385dcd1: Status 404 returned error can't find the container with id 78f4356a47a23b9779d699a0bf342b754487fec9597cb35a29fa0a64f385dcd1 Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.619316 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/openshift-state-metrics-566fddb674-7vrw4"] Dec 08 21:24:08 crc kubenswrapper[4791]: W1208 21:24:08.624334 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod048170b6_ffe5_402f_995d_431b35f154fc.slice/crio-09cf58845898fe8c5283914a3c531401f62416c50d1b41364b25d903879a1587 WatchSource:0}: Error finding container 09cf58845898fe8c5283914a3c531401f62416c50d1b41364b25d903879a1587: Status 404 returned error can't find the container with id 09cf58845898fe8c5283914a3c531401f62416c50d1b41364b25d903879a1587 Dec 08 21:24:08 crc kubenswrapper[4791]: I1208 21:24:08.712293 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/kube-state-metrics-777cb5bd5d-j79wk"] Dec 08 21:24:08 crc kubenswrapper[4791]: W1208 21:24:08.719015 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda0252ac3_5f5c_4d97_835b_50f74c280cc9.slice/crio-c7181172fe70b4bd37989e4ca988994866210d1076752ebdd8b27ddfe89d370d WatchSource:0}: Error finding container 
c7181172fe70b4bd37989e4ca988994866210d1076752ebdd8b27ddfe89d370d: Status 404 returned error can't find the container with id c7181172fe70b4bd37989e4ca988994866210d1076752ebdd8b27ddfe89d370d Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.001921 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-monitoring/alertmanager-main-0"] Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.004095 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-monitoring/alertmanager-main-0" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.008763 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"alertmanager-kube-rbac-proxy-web" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.008981 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"alertmanager-main-web-config" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.009110 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"alertmanager-main-generated" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.009692 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"alertmanager-kube-rbac-proxy-metric" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.010046 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"alertmanager-main-tls" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.010324 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"alertmanager-kube-rbac-proxy" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.013368 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"alertmanager-main-dockercfg-xx62d" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.022874 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-monitoring"/"alertmanager-trusted-ca-bundle" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.024689 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"alertmanager-main-tls-assets-0" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.041380 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/alertmanager-main-0"] Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.166587 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"alertmanager-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f8e215aa-7580-46b7-a6a0-37e3f139cde7-alertmanager-trusted-ca-bundle\") pod \"alertmanager-main-0\" (UID: \"f8e215aa-7580-46b7-a6a0-37e3f139cde7\") " pod="openshift-monitoring/alertmanager-main-0" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.166648 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/f8e215aa-7580-46b7-a6a0-37e3f139cde7-tls-assets\") pod \"alertmanager-main-0\" (UID: \"f8e215aa-7580-46b7-a6a0-37e3f139cde7\") " pod="openshift-monitoring/alertmanager-main-0" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.166675 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"alertmanager-main-db\" (UniqueName: \"kubernetes.io/empty-dir/f8e215aa-7580-46b7-a6a0-37e3f139cde7-alertmanager-main-db\") pod \"alertmanager-main-0\" (UID: 
\"f8e215aa-7580-46b7-a6a0-37e3f139cde7\") " pod="openshift-monitoring/alertmanager-main-0" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.166718 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-alertmanager-kube-rbac-proxy\" (UniqueName: \"kubernetes.io/secret/f8e215aa-7580-46b7-a6a0-37e3f139cde7-secret-alertmanager-kube-rbac-proxy\") pod \"alertmanager-main-0\" (UID: \"f8e215aa-7580-46b7-a6a0-37e3f139cde7\") " pod="openshift-monitoring/alertmanager-main-0" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.167069 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-alertmanager-main-tls\" (UniqueName: \"kubernetes.io/secret/f8e215aa-7580-46b7-a6a0-37e3f139cde7-secret-alertmanager-main-tls\") pod \"alertmanager-main-0\" (UID: \"f8e215aa-7580-46b7-a6a0-37e3f139cde7\") " pod="openshift-monitoring/alertmanager-main-0" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.167101 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/f8e215aa-7580-46b7-a6a0-37e3f139cde7-metrics-client-ca\") pod \"alertmanager-main-0\" (UID: \"f8e215aa-7580-46b7-a6a0-37e3f139cde7\") " pod="openshift-monitoring/alertmanager-main-0" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.167291 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-alertmanager-kube-rbac-proxy-metric\" (UniqueName: \"kubernetes.io/secret/f8e215aa-7580-46b7-a6a0-37e3f139cde7-secret-alertmanager-kube-rbac-proxy-metric\") pod \"alertmanager-main-0\" (UID: \"f8e215aa-7580-46b7-a6a0-37e3f139cde7\") " pod="openshift-monitoring/alertmanager-main-0" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.167337 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/f8e215aa-7580-46b7-a6a0-37e3f139cde7-config-volume\") pod \"alertmanager-main-0\" (UID: \"f8e215aa-7580-46b7-a6a0-37e3f139cde7\") " pod="openshift-monitoring/alertmanager-main-0" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.167362 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/f8e215aa-7580-46b7-a6a0-37e3f139cde7-web-config\") pod \"alertmanager-main-0\" (UID: \"f8e215aa-7580-46b7-a6a0-37e3f139cde7\") " pod="openshift-monitoring/alertmanager-main-0" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.167387 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/f8e215aa-7580-46b7-a6a0-37e3f139cde7-config-out\") pod \"alertmanager-main-0\" (UID: \"f8e215aa-7580-46b7-a6a0-37e3f139cde7\") " pod="openshift-monitoring/alertmanager-main-0" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.167409 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-alertmanager-kube-rbac-proxy-web\" (UniqueName: \"kubernetes.io/secret/f8e215aa-7580-46b7-a6a0-37e3f139cde7-secret-alertmanager-kube-rbac-proxy-web\") pod \"alertmanager-main-0\" (UID: \"f8e215aa-7580-46b7-a6a0-37e3f139cde7\") " pod="openshift-monitoring/alertmanager-main-0" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.167442 4791 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m2cvb\" (UniqueName: \"kubernetes.io/projected/f8e215aa-7580-46b7-a6a0-37e3f139cde7-kube-api-access-m2cvb\") pod \"alertmanager-main-0\" (UID: \"f8e215aa-7580-46b7-a6a0-37e3f139cde7\") " pod="openshift-monitoring/alertmanager-main-0" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.204789 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/openshift-state-metrics-566fddb674-7vrw4" event={"ID":"048170b6-ffe5-402f-995d-431b35f154fc","Type":"ContainerStarted","Data":"09cf58845898fe8c5283914a3c531401f62416c50d1b41364b25d903879a1587"} Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.205996 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/node-exporter-29ngm" event={"ID":"d6d9a3d8-295b-4b2e-a789-5a61884835a4","Type":"ContainerStarted","Data":"78f4356a47a23b9779d699a0bf342b754487fec9597cb35a29fa0a64f385dcd1"} Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.207190 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-j79wk" event={"ID":"a0252ac3-5f5c-4d97-835b-50f74c280cc9","Type":"ContainerStarted","Data":"c7181172fe70b4bd37989e4ca988994866210d1076752ebdd8b27ddfe89d370d"} Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.268418 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"alertmanager-main-db\" (UniqueName: \"kubernetes.io/empty-dir/f8e215aa-7580-46b7-a6a0-37e3f139cde7-alertmanager-main-db\") pod \"alertmanager-main-0\" (UID: \"f8e215aa-7580-46b7-a6a0-37e3f139cde7\") " pod="openshift-monitoring/alertmanager-main-0" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.268484 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-alertmanager-kube-rbac-proxy\" (UniqueName: \"kubernetes.io/secret/f8e215aa-7580-46b7-a6a0-37e3f139cde7-secret-alertmanager-kube-rbac-proxy\") pod \"alertmanager-main-0\" (UID: \"f8e215aa-7580-46b7-a6a0-37e3f139cde7\") " pod="openshift-monitoring/alertmanager-main-0" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.268520 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-alertmanager-main-tls\" (UniqueName: \"kubernetes.io/secret/f8e215aa-7580-46b7-a6a0-37e3f139cde7-secret-alertmanager-main-tls\") pod \"alertmanager-main-0\" (UID: \"f8e215aa-7580-46b7-a6a0-37e3f139cde7\") " pod="openshift-monitoring/alertmanager-main-0" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.268545 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/f8e215aa-7580-46b7-a6a0-37e3f139cde7-metrics-client-ca\") pod \"alertmanager-main-0\" (UID: \"f8e215aa-7580-46b7-a6a0-37e3f139cde7\") " pod="openshift-monitoring/alertmanager-main-0" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.268606 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-alertmanager-kube-rbac-proxy-metric\" (UniqueName: \"kubernetes.io/secret/f8e215aa-7580-46b7-a6a0-37e3f139cde7-secret-alertmanager-kube-rbac-proxy-metric\") pod \"alertmanager-main-0\" (UID: \"f8e215aa-7580-46b7-a6a0-37e3f139cde7\") " pod="openshift-monitoring/alertmanager-main-0" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.268631 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: 
\"kubernetes.io/secret/f8e215aa-7580-46b7-a6a0-37e3f139cde7-config-volume\") pod \"alertmanager-main-0\" (UID: \"f8e215aa-7580-46b7-a6a0-37e3f139cde7\") " pod="openshift-monitoring/alertmanager-main-0" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.268650 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/f8e215aa-7580-46b7-a6a0-37e3f139cde7-web-config\") pod \"alertmanager-main-0\" (UID: \"f8e215aa-7580-46b7-a6a0-37e3f139cde7\") " pod="openshift-monitoring/alertmanager-main-0" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.268667 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/f8e215aa-7580-46b7-a6a0-37e3f139cde7-config-out\") pod \"alertmanager-main-0\" (UID: \"f8e215aa-7580-46b7-a6a0-37e3f139cde7\") " pod="openshift-monitoring/alertmanager-main-0" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.268684 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-alertmanager-kube-rbac-proxy-web\" (UniqueName: \"kubernetes.io/secret/f8e215aa-7580-46b7-a6a0-37e3f139cde7-secret-alertmanager-kube-rbac-proxy-web\") pod \"alertmanager-main-0\" (UID: \"f8e215aa-7580-46b7-a6a0-37e3f139cde7\") " pod="openshift-monitoring/alertmanager-main-0" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.268889 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m2cvb\" (UniqueName: \"kubernetes.io/projected/f8e215aa-7580-46b7-a6a0-37e3f139cde7-kube-api-access-m2cvb\") pod \"alertmanager-main-0\" (UID: \"f8e215aa-7580-46b7-a6a0-37e3f139cde7\") " pod="openshift-monitoring/alertmanager-main-0" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.268986 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"alertmanager-main-db\" (UniqueName: \"kubernetes.io/empty-dir/f8e215aa-7580-46b7-a6a0-37e3f139cde7-alertmanager-main-db\") pod \"alertmanager-main-0\" (UID: \"f8e215aa-7580-46b7-a6a0-37e3f139cde7\") " pod="openshift-monitoring/alertmanager-main-0" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.269667 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/f8e215aa-7580-46b7-a6a0-37e3f139cde7-metrics-client-ca\") pod \"alertmanager-main-0\" (UID: \"f8e215aa-7580-46b7-a6a0-37e3f139cde7\") " pod="openshift-monitoring/alertmanager-main-0" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.269984 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"alertmanager-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f8e215aa-7580-46b7-a6a0-37e3f139cde7-alertmanager-trusted-ca-bundle\") pod \"alertmanager-main-0\" (UID: \"f8e215aa-7580-46b7-a6a0-37e3f139cde7\") " pod="openshift-monitoring/alertmanager-main-0" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.270034 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/f8e215aa-7580-46b7-a6a0-37e3f139cde7-tls-assets\") pod \"alertmanager-main-0\" (UID: \"f8e215aa-7580-46b7-a6a0-37e3f139cde7\") " pod="openshift-monitoring/alertmanager-main-0" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.270929 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"alertmanager-trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/f8e215aa-7580-46b7-a6a0-37e3f139cde7-alertmanager-trusted-ca-bundle\") pod \"alertmanager-main-0\" (UID: \"f8e215aa-7580-46b7-a6a0-37e3f139cde7\") " pod="openshift-monitoring/alertmanager-main-0" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.276317 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/f8e215aa-7580-46b7-a6a0-37e3f139cde7-tls-assets\") pod \"alertmanager-main-0\" (UID: \"f8e215aa-7580-46b7-a6a0-37e3f139cde7\") " pod="openshift-monitoring/alertmanager-main-0" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.276434 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-alertmanager-kube-rbac-proxy\" (UniqueName: \"kubernetes.io/secret/f8e215aa-7580-46b7-a6a0-37e3f139cde7-secret-alertmanager-kube-rbac-proxy\") pod \"alertmanager-main-0\" (UID: \"f8e215aa-7580-46b7-a6a0-37e3f139cde7\") " pod="openshift-monitoring/alertmanager-main-0" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.276892 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-alertmanager-main-tls\" (UniqueName: \"kubernetes.io/secret/f8e215aa-7580-46b7-a6a0-37e3f139cde7-secret-alertmanager-main-tls\") pod \"alertmanager-main-0\" (UID: \"f8e215aa-7580-46b7-a6a0-37e3f139cde7\") " pod="openshift-monitoring/alertmanager-main-0" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.277216 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/f8e215aa-7580-46b7-a6a0-37e3f139cde7-config-volume\") pod \"alertmanager-main-0\" (UID: \"f8e215aa-7580-46b7-a6a0-37e3f139cde7\") " pod="openshift-monitoring/alertmanager-main-0" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.278162 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-alertmanager-kube-rbac-proxy-metric\" (UniqueName: \"kubernetes.io/secret/f8e215aa-7580-46b7-a6a0-37e3f139cde7-secret-alertmanager-kube-rbac-proxy-metric\") pod \"alertmanager-main-0\" (UID: \"f8e215aa-7580-46b7-a6a0-37e3f139cde7\") " pod="openshift-monitoring/alertmanager-main-0" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.279566 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-alertmanager-kube-rbac-proxy-web\" (UniqueName: \"kubernetes.io/secret/f8e215aa-7580-46b7-a6a0-37e3f139cde7-secret-alertmanager-kube-rbac-proxy-web\") pod \"alertmanager-main-0\" (UID: \"f8e215aa-7580-46b7-a6a0-37e3f139cde7\") " pod="openshift-monitoring/alertmanager-main-0" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.283145 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/f8e215aa-7580-46b7-a6a0-37e3f139cde7-config-out\") pod \"alertmanager-main-0\" (UID: \"f8e215aa-7580-46b7-a6a0-37e3f139cde7\") " pod="openshift-monitoring/alertmanager-main-0" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.284219 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/f8e215aa-7580-46b7-a6a0-37e3f139cde7-web-config\") pod \"alertmanager-main-0\" (UID: \"f8e215aa-7580-46b7-a6a0-37e3f139cde7\") " pod="openshift-monitoring/alertmanager-main-0" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.290616 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m2cvb\" (UniqueName: 
\"kubernetes.io/projected/f8e215aa-7580-46b7-a6a0-37e3f139cde7-kube-api-access-m2cvb\") pod \"alertmanager-main-0\" (UID: \"f8e215aa-7580-46b7-a6a0-37e3f139cde7\") " pod="openshift-monitoring/alertmanager-main-0" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.320382 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-monitoring/alertmanager-main-0" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.782827 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/alertmanager-main-0"] Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.881924 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-monitoring/thanos-querier-54b8d7cb66-qrzdx"] Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.884119 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-monitoring/thanos-querier-54b8d7cb66-qrzdx" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.884888 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/thanos-querier-54b8d7cb66-qrzdx"] Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.887537 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"thanos-querier-grpc-tls-acs41qq8ub4jf" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.887790 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"thanos-querier-kube-rbac-proxy-rules" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.887963 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"thanos-querier-kube-rbac-proxy-metrics" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.888139 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"thanos-querier-kube-rbac-proxy-web" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.888296 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"thanos-querier-tls" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.888444 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"thanos-querier-kube-rbac-proxy" Dec 08 21:24:09 crc kubenswrapper[4791]: I1208 21:24:09.888674 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"thanos-querier-dockercfg-q54pq" Dec 08 21:24:10 crc kubenswrapper[4791]: I1208 21:24:10.082948 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/328971ba-ea1e-4447-8274-8d7dceec9f72-metrics-client-ca\") pod \"thanos-querier-54b8d7cb66-qrzdx\" (UID: \"328971ba-ea1e-4447-8274-8d7dceec9f72\") " pod="openshift-monitoring/thanos-querier-54b8d7cb66-qrzdx" Dec 08 21:24:10 crc kubenswrapper[4791]: I1208 21:24:10.083151 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gwv7s\" (UniqueName: \"kubernetes.io/projected/328971ba-ea1e-4447-8274-8d7dceec9f72-kube-api-access-gwv7s\") pod \"thanos-querier-54b8d7cb66-qrzdx\" (UID: \"328971ba-ea1e-4447-8274-8d7dceec9f72\") " pod="openshift-monitoring/thanos-querier-54b8d7cb66-qrzdx" Dec 08 21:24:10 crc kubenswrapper[4791]: I1208 21:24:10.083185 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-grpc-tls\" (UniqueName: 
\"kubernetes.io/secret/328971ba-ea1e-4447-8274-8d7dceec9f72-secret-grpc-tls\") pod \"thanos-querier-54b8d7cb66-qrzdx\" (UID: \"328971ba-ea1e-4447-8274-8d7dceec9f72\") " pod="openshift-monitoring/thanos-querier-54b8d7cb66-qrzdx" Dec 08 21:24:10 crc kubenswrapper[4791]: I1208 21:24:10.083318 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-thanos-querier-kube-rbac-proxy-metrics\" (UniqueName: \"kubernetes.io/secret/328971ba-ea1e-4447-8274-8d7dceec9f72-secret-thanos-querier-kube-rbac-proxy-metrics\") pod \"thanos-querier-54b8d7cb66-qrzdx\" (UID: \"328971ba-ea1e-4447-8274-8d7dceec9f72\") " pod="openshift-monitoring/thanos-querier-54b8d7cb66-qrzdx" Dec 08 21:24:10 crc kubenswrapper[4791]: I1208 21:24:10.084181 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-thanos-querier-kube-rbac-proxy\" (UniqueName: \"kubernetes.io/secret/328971ba-ea1e-4447-8274-8d7dceec9f72-secret-thanos-querier-kube-rbac-proxy\") pod \"thanos-querier-54b8d7cb66-qrzdx\" (UID: \"328971ba-ea1e-4447-8274-8d7dceec9f72\") " pod="openshift-monitoring/thanos-querier-54b8d7cb66-qrzdx" Dec 08 21:24:10 crc kubenswrapper[4791]: I1208 21:24:10.084254 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-thanos-querier-kube-rbac-proxy-web\" (UniqueName: \"kubernetes.io/secret/328971ba-ea1e-4447-8274-8d7dceec9f72-secret-thanos-querier-kube-rbac-proxy-web\") pod \"thanos-querier-54b8d7cb66-qrzdx\" (UID: \"328971ba-ea1e-4447-8274-8d7dceec9f72\") " pod="openshift-monitoring/thanos-querier-54b8d7cb66-qrzdx" Dec 08 21:24:10 crc kubenswrapper[4791]: I1208 21:24:10.084282 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-thanos-querier-tls\" (UniqueName: \"kubernetes.io/secret/328971ba-ea1e-4447-8274-8d7dceec9f72-secret-thanos-querier-tls\") pod \"thanos-querier-54b8d7cb66-qrzdx\" (UID: \"328971ba-ea1e-4447-8274-8d7dceec9f72\") " pod="openshift-monitoring/thanos-querier-54b8d7cb66-qrzdx" Dec 08 21:24:10 crc kubenswrapper[4791]: I1208 21:24:10.084314 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-thanos-querier-kube-rbac-proxy-rules\" (UniqueName: \"kubernetes.io/secret/328971ba-ea1e-4447-8274-8d7dceec9f72-secret-thanos-querier-kube-rbac-proxy-rules\") pod \"thanos-querier-54b8d7cb66-qrzdx\" (UID: \"328971ba-ea1e-4447-8274-8d7dceec9f72\") " pod="openshift-monitoring/thanos-querier-54b8d7cb66-qrzdx" Dec 08 21:24:10 crc kubenswrapper[4791]: W1208 21:24:10.088557 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf8e215aa_7580_46b7_a6a0_37e3f139cde7.slice/crio-22589cefc56757e4ece57196520fd9b8227e4e1b539f716473c70c0ffa80fef9 WatchSource:0}: Error finding container 22589cefc56757e4ece57196520fd9b8227e4e1b539f716473c70c0ffa80fef9: Status 404 returned error can't find the container with id 22589cefc56757e4ece57196520fd9b8227e4e1b539f716473c70c0ffa80fef9 Dec 08 21:24:10 crc kubenswrapper[4791]: I1208 21:24:10.185787 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/328971ba-ea1e-4447-8274-8d7dceec9f72-metrics-client-ca\") pod \"thanos-querier-54b8d7cb66-qrzdx\" (UID: \"328971ba-ea1e-4447-8274-8d7dceec9f72\") " pod="openshift-monitoring/thanos-querier-54b8d7cb66-qrzdx" Dec 08 21:24:10 crc 
kubenswrapper[4791]: I1208 21:24:10.186121 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gwv7s\" (UniqueName: \"kubernetes.io/projected/328971ba-ea1e-4447-8274-8d7dceec9f72-kube-api-access-gwv7s\") pod \"thanos-querier-54b8d7cb66-qrzdx\" (UID: \"328971ba-ea1e-4447-8274-8d7dceec9f72\") " pod="openshift-monitoring/thanos-querier-54b8d7cb66-qrzdx" Dec 08 21:24:10 crc kubenswrapper[4791]: I1208 21:24:10.186143 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-grpc-tls\" (UniqueName: \"kubernetes.io/secret/328971ba-ea1e-4447-8274-8d7dceec9f72-secret-grpc-tls\") pod \"thanos-querier-54b8d7cb66-qrzdx\" (UID: \"328971ba-ea1e-4447-8274-8d7dceec9f72\") " pod="openshift-monitoring/thanos-querier-54b8d7cb66-qrzdx" Dec 08 21:24:10 crc kubenswrapper[4791]: I1208 21:24:10.186193 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-thanos-querier-kube-rbac-proxy-metrics\" (UniqueName: \"kubernetes.io/secret/328971ba-ea1e-4447-8274-8d7dceec9f72-secret-thanos-querier-kube-rbac-proxy-metrics\") pod \"thanos-querier-54b8d7cb66-qrzdx\" (UID: \"328971ba-ea1e-4447-8274-8d7dceec9f72\") " pod="openshift-monitoring/thanos-querier-54b8d7cb66-qrzdx" Dec 08 21:24:10 crc kubenswrapper[4791]: I1208 21:24:10.186242 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-thanos-querier-kube-rbac-proxy\" (UniqueName: \"kubernetes.io/secret/328971ba-ea1e-4447-8274-8d7dceec9f72-secret-thanos-querier-kube-rbac-proxy\") pod \"thanos-querier-54b8d7cb66-qrzdx\" (UID: \"328971ba-ea1e-4447-8274-8d7dceec9f72\") " pod="openshift-monitoring/thanos-querier-54b8d7cb66-qrzdx" Dec 08 21:24:10 crc kubenswrapper[4791]: I1208 21:24:10.186286 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-thanos-querier-kube-rbac-proxy-web\" (UniqueName: \"kubernetes.io/secret/328971ba-ea1e-4447-8274-8d7dceec9f72-secret-thanos-querier-kube-rbac-proxy-web\") pod \"thanos-querier-54b8d7cb66-qrzdx\" (UID: \"328971ba-ea1e-4447-8274-8d7dceec9f72\") " pod="openshift-monitoring/thanos-querier-54b8d7cb66-qrzdx" Dec 08 21:24:10 crc kubenswrapper[4791]: I1208 21:24:10.186344 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-thanos-querier-tls\" (UniqueName: \"kubernetes.io/secret/328971ba-ea1e-4447-8274-8d7dceec9f72-secret-thanos-querier-tls\") pod \"thanos-querier-54b8d7cb66-qrzdx\" (UID: \"328971ba-ea1e-4447-8274-8d7dceec9f72\") " pod="openshift-monitoring/thanos-querier-54b8d7cb66-qrzdx" Dec 08 21:24:10 crc kubenswrapper[4791]: I1208 21:24:10.186366 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-thanos-querier-kube-rbac-proxy-rules\" (UniqueName: \"kubernetes.io/secret/328971ba-ea1e-4447-8274-8d7dceec9f72-secret-thanos-querier-kube-rbac-proxy-rules\") pod \"thanos-querier-54b8d7cb66-qrzdx\" (UID: \"328971ba-ea1e-4447-8274-8d7dceec9f72\") " pod="openshift-monitoring/thanos-querier-54b8d7cb66-qrzdx" Dec 08 21:24:10 crc kubenswrapper[4791]: I1208 21:24:10.186736 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/328971ba-ea1e-4447-8274-8d7dceec9f72-metrics-client-ca\") pod \"thanos-querier-54b8d7cb66-qrzdx\" (UID: \"328971ba-ea1e-4447-8274-8d7dceec9f72\") " pod="openshift-monitoring/thanos-querier-54b8d7cb66-qrzdx" Dec 08 21:24:10 crc kubenswrapper[4791]: I1208 21:24:10.192123 4791 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-thanos-querier-kube-rbac-proxy-metrics\" (UniqueName: \"kubernetes.io/secret/328971ba-ea1e-4447-8274-8d7dceec9f72-secret-thanos-querier-kube-rbac-proxy-metrics\") pod \"thanos-querier-54b8d7cb66-qrzdx\" (UID: \"328971ba-ea1e-4447-8274-8d7dceec9f72\") " pod="openshift-monitoring/thanos-querier-54b8d7cb66-qrzdx" Dec 08 21:24:10 crc kubenswrapper[4791]: I1208 21:24:10.192184 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-thanos-querier-kube-rbac-proxy-web\" (UniqueName: \"kubernetes.io/secret/328971ba-ea1e-4447-8274-8d7dceec9f72-secret-thanos-querier-kube-rbac-proxy-web\") pod \"thanos-querier-54b8d7cb66-qrzdx\" (UID: \"328971ba-ea1e-4447-8274-8d7dceec9f72\") " pod="openshift-monitoring/thanos-querier-54b8d7cb66-qrzdx" Dec 08 21:24:10 crc kubenswrapper[4791]: I1208 21:24:10.192411 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-thanos-querier-tls\" (UniqueName: \"kubernetes.io/secret/328971ba-ea1e-4447-8274-8d7dceec9f72-secret-thanos-querier-tls\") pod \"thanos-querier-54b8d7cb66-qrzdx\" (UID: \"328971ba-ea1e-4447-8274-8d7dceec9f72\") " pod="openshift-monitoring/thanos-querier-54b8d7cb66-qrzdx" Dec 08 21:24:10 crc kubenswrapper[4791]: I1208 21:24:10.192439 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-thanos-querier-kube-rbac-proxy\" (UniqueName: \"kubernetes.io/secret/328971ba-ea1e-4447-8274-8d7dceec9f72-secret-thanos-querier-kube-rbac-proxy\") pod \"thanos-querier-54b8d7cb66-qrzdx\" (UID: \"328971ba-ea1e-4447-8274-8d7dceec9f72\") " pod="openshift-monitoring/thanos-querier-54b8d7cb66-qrzdx" Dec 08 21:24:10 crc kubenswrapper[4791]: I1208 21:24:10.193196 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-grpc-tls\" (UniqueName: \"kubernetes.io/secret/328971ba-ea1e-4447-8274-8d7dceec9f72-secret-grpc-tls\") pod \"thanos-querier-54b8d7cb66-qrzdx\" (UID: \"328971ba-ea1e-4447-8274-8d7dceec9f72\") " pod="openshift-monitoring/thanos-querier-54b8d7cb66-qrzdx" Dec 08 21:24:10 crc kubenswrapper[4791]: I1208 21:24:10.193568 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-thanos-querier-kube-rbac-proxy-rules\" (UniqueName: \"kubernetes.io/secret/328971ba-ea1e-4447-8274-8d7dceec9f72-secret-thanos-querier-kube-rbac-proxy-rules\") pod \"thanos-querier-54b8d7cb66-qrzdx\" (UID: \"328971ba-ea1e-4447-8274-8d7dceec9f72\") " pod="openshift-monitoring/thanos-querier-54b8d7cb66-qrzdx" Dec 08 21:24:10 crc kubenswrapper[4791]: I1208 21:24:10.204109 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gwv7s\" (UniqueName: \"kubernetes.io/projected/328971ba-ea1e-4447-8274-8d7dceec9f72-kube-api-access-gwv7s\") pod \"thanos-querier-54b8d7cb66-qrzdx\" (UID: \"328971ba-ea1e-4447-8274-8d7dceec9f72\") " pod="openshift-monitoring/thanos-querier-54b8d7cb66-qrzdx" Dec 08 21:24:10 crc kubenswrapper[4791]: I1208 21:24:10.213310 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/alertmanager-main-0" event={"ID":"f8e215aa-7580-46b7-a6a0-37e3f139cde7","Type":"ContainerStarted","Data":"22589cefc56757e4ece57196520fd9b8227e4e1b539f716473c70c0ffa80fef9"} Dec 08 21:24:10 crc kubenswrapper[4791]: I1208 21:24:10.218256 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/openshift-state-metrics-566fddb674-7vrw4" 
event={"ID":"048170b6-ffe5-402f-995d-431b35f154fc","Type":"ContainerStarted","Data":"6f5e87c7208cad2c3d7820c85f3e92a426b8e442e7d752c43fb6e2e299fd9528"} Dec 08 21:24:10 crc kubenswrapper[4791]: I1208 21:24:10.218355 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/openshift-state-metrics-566fddb674-7vrw4" event={"ID":"048170b6-ffe5-402f-995d-431b35f154fc","Type":"ContainerStarted","Data":"0faa155ef27c9d971176b86a10c5db72d2a26b52ac2dde55566282e33b79fc68"} Dec 08 21:24:10 crc kubenswrapper[4791]: I1208 21:24:10.500261 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-monitoring/thanos-querier-54b8d7cb66-qrzdx" Dec 08 21:24:10 crc kubenswrapper[4791]: I1208 21:24:10.944985 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/thanos-querier-54b8d7cb66-qrzdx"] Dec 08 21:24:11 crc kubenswrapper[4791]: W1208 21:24:11.064779 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod328971ba_ea1e_4447_8274_8d7dceec9f72.slice/crio-33d592dee8d33a3a9fad5a3cc26bbaee0e2e434dd7584c3e1f102f976dd70f6e WatchSource:0}: Error finding container 33d592dee8d33a3a9fad5a3cc26bbaee0e2e434dd7584c3e1f102f976dd70f6e: Status 404 returned error can't find the container with id 33d592dee8d33a3a9fad5a3cc26bbaee0e2e434dd7584c3e1f102f976dd70f6e Dec 08 21:24:11 crc kubenswrapper[4791]: I1208 21:24:11.229624 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/thanos-querier-54b8d7cb66-qrzdx" event={"ID":"328971ba-ea1e-4447-8274-8d7dceec9f72","Type":"ContainerStarted","Data":"33d592dee8d33a3a9fad5a3cc26bbaee0e2e434dd7584c3e1f102f976dd70f6e"} Dec 08 21:24:12 crc kubenswrapper[4791]: I1208 21:24:12.238814 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-j79wk" event={"ID":"a0252ac3-5f5c-4d97-835b-50f74c280cc9","Type":"ContainerStarted","Data":"d1046f0b33c75db839046fe69d2916e5de9516916fcb08b1b579eea6aa17eb7e"} Dec 08 21:24:12 crc kubenswrapper[4791]: I1208 21:24:12.239744 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-j79wk" event={"ID":"a0252ac3-5f5c-4d97-835b-50f74c280cc9","Type":"ContainerStarted","Data":"28f7859581e6674514a6c8eacfb8fd5dac1b8d5c7e0091044312406c57331417"} Dec 08 21:24:12 crc kubenswrapper[4791]: I1208 21:24:12.241547 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/openshift-state-metrics-566fddb674-7vrw4" event={"ID":"048170b6-ffe5-402f-995d-431b35f154fc","Type":"ContainerStarted","Data":"2f61d6628d4b48cf50401fe2b8c3c09fd0d8a0ec038213727ba7570d3d646922"} Dec 08 21:24:12 crc kubenswrapper[4791]: I1208 21:24:12.244449 4791 generic.go:334] "Generic (PLEG): container finished" podID="f8e215aa-7580-46b7-a6a0-37e3f139cde7" containerID="b96af75726e8dab98ab6d97a3b160ecc94cc257fc195d5db8701e4a2bffbc610" exitCode=0 Dec 08 21:24:12 crc kubenswrapper[4791]: I1208 21:24:12.244497 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/alertmanager-main-0" event={"ID":"f8e215aa-7580-46b7-a6a0-37e3f139cde7","Type":"ContainerDied","Data":"b96af75726e8dab98ab6d97a3b160ecc94cc257fc195d5db8701e4a2bffbc610"} Dec 08 21:24:12 crc kubenswrapper[4791]: I1208 21:24:12.247006 4791 generic.go:334] "Generic (PLEG): container finished" podID="d6d9a3d8-295b-4b2e-a789-5a61884835a4" containerID="0421d8a90fcb6ab314047d87f3568b9a9f861ecf9230e49c8eec7f2e5a3d6c04" 
exitCode=0 Dec 08 21:24:12 crc kubenswrapper[4791]: I1208 21:24:12.247047 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/node-exporter-29ngm" event={"ID":"d6d9a3d8-295b-4b2e-a789-5a61884835a4","Type":"ContainerDied","Data":"0421d8a90fcb6ab314047d87f3568b9a9f861ecf9230e49c8eec7f2e5a3d6c04"} Dec 08 21:24:12 crc kubenswrapper[4791]: I1208 21:24:12.381577 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-monitoring/openshift-state-metrics-566fddb674-7vrw4" podStartSLOduration=3.35808188 podStartE2EDuration="5.381548007s" podCreationTimestamp="2025-12-08 21:24:07 +0000 UTC" firstStartedPulling="2025-12-08 21:24:09.52842576 +0000 UTC m=+326.227184105" lastFinishedPulling="2025-12-08 21:24:11.551891887 +0000 UTC m=+328.250650232" observedRunningTime="2025-12-08 21:24:12.354454099 +0000 UTC m=+329.053212464" watchObservedRunningTime="2025-12-08 21:24:12.381548007 +0000 UTC m=+329.080306352" Dec 08 21:24:12 crc kubenswrapper[4791]: I1208 21:24:12.702208 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-c8c89d49c-bjcpg"] Dec 08 21:24:12 crc kubenswrapper[4791]: I1208 21:24:12.703549 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-c8c89d49c-bjcpg" Dec 08 21:24:12 crc kubenswrapper[4791]: I1208 21:24:12.731217 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3cde98bf-1fc3-4460-879b-26802e310157-console-oauth-config\") pod \"console-c8c89d49c-bjcpg\" (UID: \"3cde98bf-1fc3-4460-879b-26802e310157\") " pod="openshift-console/console-c8c89d49c-bjcpg" Dec 08 21:24:12 crc kubenswrapper[4791]: I1208 21:24:12.731298 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3cde98bf-1fc3-4460-879b-26802e310157-oauth-serving-cert\") pod \"console-c8c89d49c-bjcpg\" (UID: \"3cde98bf-1fc3-4460-879b-26802e310157\") " pod="openshift-console/console-c8c89d49c-bjcpg" Dec 08 21:24:12 crc kubenswrapper[4791]: I1208 21:24:12.731346 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6mtbf\" (UniqueName: \"kubernetes.io/projected/3cde98bf-1fc3-4460-879b-26802e310157-kube-api-access-6mtbf\") pod \"console-c8c89d49c-bjcpg\" (UID: \"3cde98bf-1fc3-4460-879b-26802e310157\") " pod="openshift-console/console-c8c89d49c-bjcpg" Dec 08 21:24:12 crc kubenswrapper[4791]: I1208 21:24:12.731363 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3cde98bf-1fc3-4460-879b-26802e310157-trusted-ca-bundle\") pod \"console-c8c89d49c-bjcpg\" (UID: \"3cde98bf-1fc3-4460-879b-26802e310157\") " pod="openshift-console/console-c8c89d49c-bjcpg" Dec 08 21:24:12 crc kubenswrapper[4791]: I1208 21:24:12.731393 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3cde98bf-1fc3-4460-879b-26802e310157-console-config\") pod \"console-c8c89d49c-bjcpg\" (UID: \"3cde98bf-1fc3-4460-879b-26802e310157\") " pod="openshift-console/console-c8c89d49c-bjcpg" Dec 08 21:24:12 crc kubenswrapper[4791]: I1208 21:24:12.731414 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3cde98bf-1fc3-4460-879b-26802e310157-console-serving-cert\") pod \"console-c8c89d49c-bjcpg\" (UID: \"3cde98bf-1fc3-4460-879b-26802e310157\") " pod="openshift-console/console-c8c89d49c-bjcpg" Dec 08 21:24:12 crc kubenswrapper[4791]: I1208 21:24:12.731437 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3cde98bf-1fc3-4460-879b-26802e310157-service-ca\") pod \"console-c8c89d49c-bjcpg\" (UID: \"3cde98bf-1fc3-4460-879b-26802e310157\") " pod="openshift-console/console-c8c89d49c-bjcpg" Dec 08 21:24:12 crc kubenswrapper[4791]: I1208 21:24:12.731501 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-c8c89d49c-bjcpg"] Dec 08 21:24:12 crc kubenswrapper[4791]: I1208 21:24:12.832826 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3cde98bf-1fc3-4460-879b-26802e310157-console-oauth-config\") pod \"console-c8c89d49c-bjcpg\" (UID: \"3cde98bf-1fc3-4460-879b-26802e310157\") " pod="openshift-console/console-c8c89d49c-bjcpg" Dec 08 21:24:12 crc kubenswrapper[4791]: I1208 21:24:12.832900 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3cde98bf-1fc3-4460-879b-26802e310157-oauth-serving-cert\") pod \"console-c8c89d49c-bjcpg\" (UID: \"3cde98bf-1fc3-4460-879b-26802e310157\") " pod="openshift-console/console-c8c89d49c-bjcpg" Dec 08 21:24:12 crc kubenswrapper[4791]: I1208 21:24:12.832938 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6mtbf\" (UniqueName: \"kubernetes.io/projected/3cde98bf-1fc3-4460-879b-26802e310157-kube-api-access-6mtbf\") pod \"console-c8c89d49c-bjcpg\" (UID: \"3cde98bf-1fc3-4460-879b-26802e310157\") " pod="openshift-console/console-c8c89d49c-bjcpg" Dec 08 21:24:12 crc kubenswrapper[4791]: I1208 21:24:12.832957 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3cde98bf-1fc3-4460-879b-26802e310157-trusted-ca-bundle\") pod \"console-c8c89d49c-bjcpg\" (UID: \"3cde98bf-1fc3-4460-879b-26802e310157\") " pod="openshift-console/console-c8c89d49c-bjcpg" Dec 08 21:24:12 crc kubenswrapper[4791]: I1208 21:24:12.832987 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3cde98bf-1fc3-4460-879b-26802e310157-console-config\") pod \"console-c8c89d49c-bjcpg\" (UID: \"3cde98bf-1fc3-4460-879b-26802e310157\") " pod="openshift-console/console-c8c89d49c-bjcpg" Dec 08 21:24:12 crc kubenswrapper[4791]: I1208 21:24:12.833008 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3cde98bf-1fc3-4460-879b-26802e310157-console-serving-cert\") pod \"console-c8c89d49c-bjcpg\" (UID: \"3cde98bf-1fc3-4460-879b-26802e310157\") " pod="openshift-console/console-c8c89d49c-bjcpg" Dec 08 21:24:12 crc kubenswrapper[4791]: I1208 21:24:12.833038 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3cde98bf-1fc3-4460-879b-26802e310157-service-ca\") pod \"console-c8c89d49c-bjcpg\" (UID: \"3cde98bf-1fc3-4460-879b-26802e310157\") " 
pod="openshift-console/console-c8c89d49c-bjcpg" Dec 08 21:24:12 crc kubenswrapper[4791]: I1208 21:24:12.836115 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3cde98bf-1fc3-4460-879b-26802e310157-oauth-serving-cert\") pod \"console-c8c89d49c-bjcpg\" (UID: \"3cde98bf-1fc3-4460-879b-26802e310157\") " pod="openshift-console/console-c8c89d49c-bjcpg" Dec 08 21:24:12 crc kubenswrapper[4791]: I1208 21:24:12.836770 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3cde98bf-1fc3-4460-879b-26802e310157-service-ca\") pod \"console-c8c89d49c-bjcpg\" (UID: \"3cde98bf-1fc3-4460-879b-26802e310157\") " pod="openshift-console/console-c8c89d49c-bjcpg" Dec 08 21:24:12 crc kubenswrapper[4791]: I1208 21:24:12.837141 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3cde98bf-1fc3-4460-879b-26802e310157-trusted-ca-bundle\") pod \"console-c8c89d49c-bjcpg\" (UID: \"3cde98bf-1fc3-4460-879b-26802e310157\") " pod="openshift-console/console-c8c89d49c-bjcpg" Dec 08 21:24:12 crc kubenswrapper[4791]: I1208 21:24:12.837578 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3cde98bf-1fc3-4460-879b-26802e310157-console-config\") pod \"console-c8c89d49c-bjcpg\" (UID: \"3cde98bf-1fc3-4460-879b-26802e310157\") " pod="openshift-console/console-c8c89d49c-bjcpg" Dec 08 21:24:12 crc kubenswrapper[4791]: I1208 21:24:12.842097 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3cde98bf-1fc3-4460-879b-26802e310157-console-oauth-config\") pod \"console-c8c89d49c-bjcpg\" (UID: \"3cde98bf-1fc3-4460-879b-26802e310157\") " pod="openshift-console/console-c8c89d49c-bjcpg" Dec 08 21:24:12 crc kubenswrapper[4791]: I1208 21:24:12.843404 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3cde98bf-1fc3-4460-879b-26802e310157-console-serving-cert\") pod \"console-c8c89d49c-bjcpg\" (UID: \"3cde98bf-1fc3-4460-879b-26802e310157\") " pod="openshift-console/console-c8c89d49c-bjcpg" Dec 08 21:24:12 crc kubenswrapper[4791]: I1208 21:24:12.852413 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6mtbf\" (UniqueName: \"kubernetes.io/projected/3cde98bf-1fc3-4460-879b-26802e310157-kube-api-access-6mtbf\") pod \"console-c8c89d49c-bjcpg\" (UID: \"3cde98bf-1fc3-4460-879b-26802e310157\") " pod="openshift-console/console-c8c89d49c-bjcpg" Dec 08 21:24:13 crc kubenswrapper[4791]: I1208 21:24:13.023727 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-c8c89d49c-bjcpg" Dec 08 21:24:13 crc kubenswrapper[4791]: I1208 21:24:13.256072 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-j79wk" event={"ID":"a0252ac3-5f5c-4d97-835b-50f74c280cc9","Type":"ContainerStarted","Data":"0e6dec9aae6554a0aea991ea1b21278bb9c31f1fffcfcad5e7cae4230b990e5b"} Dec 08 21:24:13 crc kubenswrapper[4791]: I1208 21:24:13.260827 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/node-exporter-29ngm" event={"ID":"d6d9a3d8-295b-4b2e-a789-5a61884835a4","Type":"ContainerStarted","Data":"bcc166567440dce2106eea69665eeb93753d5ef74f26fb2cf662e5731345c14d"} Dec 08 21:24:13 crc kubenswrapper[4791]: I1208 21:24:13.260910 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/node-exporter-29ngm" event={"ID":"d6d9a3d8-295b-4b2e-a789-5a61884835a4","Type":"ContainerStarted","Data":"01a1cbd3be1015d3621b0acc0fa55f0546bfeb814e1b36e157b86040332687ac"} Dec 08 21:24:13 crc kubenswrapper[4791]: I1208 21:24:13.278741 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-j79wk" podStartSLOduration=3.915950578 podStartE2EDuration="6.278698828s" podCreationTimestamp="2025-12-08 21:24:07 +0000 UTC" firstStartedPulling="2025-12-08 21:24:08.721733678 +0000 UTC m=+325.420492013" lastFinishedPulling="2025-12-08 21:24:11.084481918 +0000 UTC m=+327.783240263" observedRunningTime="2025-12-08 21:24:13.277575229 +0000 UTC m=+329.976333584" watchObservedRunningTime="2025-12-08 21:24:13.278698828 +0000 UTC m=+329.977457173" Dec 08 21:24:13 crc kubenswrapper[4791]: I1208 21:24:13.300403 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-monitoring/node-exporter-29ngm" podStartSLOduration=4.080176568 podStartE2EDuration="6.300384388s" podCreationTimestamp="2025-12-08 21:24:07 +0000 UTC" firstStartedPulling="2025-12-08 21:24:08.324104504 +0000 UTC m=+325.022862849" lastFinishedPulling="2025-12-08 21:24:10.544312324 +0000 UTC m=+327.243070669" observedRunningTime="2025-12-08 21:24:13.298831439 +0000 UTC m=+329.997589804" watchObservedRunningTime="2025-12-08 21:24:13.300384388 +0000 UTC m=+329.999142733" Dec 08 21:24:13 crc kubenswrapper[4791]: I1208 21:24:13.405261 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-monitoring/metrics-server-f799f76d7-kg8mn"] Dec 08 21:24:13 crc kubenswrapper[4791]: I1208 21:24:13.406197 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-monitoring/metrics-server-f799f76d7-kg8mn" Dec 08 21:24:13 crc kubenswrapper[4791]: I1208 21:24:13.408686 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"metrics-client-certs" Dec 08 21:24:13 crc kubenswrapper[4791]: I1208 21:24:13.408780 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"metrics-server-dockercfg-g4wqp" Dec 08 21:24:13 crc kubenswrapper[4791]: I1208 21:24:13.409100 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"metrics-server-e62ur037npg3i" Dec 08 21:24:13 crc kubenswrapper[4791]: I1208 21:24:13.409124 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-monitoring"/"metrics-server-audit-profiles" Dec 08 21:24:13 crc kubenswrapper[4791]: I1208 21:24:13.409337 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"metrics-server-tls" Dec 08 21:24:13 crc kubenswrapper[4791]: I1208 21:24:13.410534 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-monitoring"/"kubelet-serving-ca-bundle" Dec 08 21:24:13 crc kubenswrapper[4791]: I1208 21:24:13.420948 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/metrics-server-f799f76d7-kg8mn"] Dec 08 21:24:13 crc kubenswrapper[4791]: I1208 21:24:13.441765 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fb0db65-5bab-450b-a32b-eb6f13028d4b-client-ca-bundle\") pod \"metrics-server-f799f76d7-kg8mn\" (UID: \"8fb0db65-5bab-450b-a32b-eb6f13028d4b\") " pod="openshift-monitoring/metrics-server-f799f76d7-kg8mn" Dec 08 21:24:13 crc kubenswrapper[4791]: I1208 21:24:13.441847 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-log\" (UniqueName: \"kubernetes.io/empty-dir/8fb0db65-5bab-450b-a32b-eb6f13028d4b-audit-log\") pod \"metrics-server-f799f76d7-kg8mn\" (UID: \"8fb0db65-5bab-450b-a32b-eb6f13028d4b\") " pod="openshift-monitoring/metrics-server-f799f76d7-kg8mn" Dec 08 21:24:13 crc kubenswrapper[4791]: I1208 21:24:13.441942 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"configmap-kubelet-serving-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8fb0db65-5bab-450b-a32b-eb6f13028d4b-configmap-kubelet-serving-ca-bundle\") pod \"metrics-server-f799f76d7-kg8mn\" (UID: \"8fb0db65-5bab-450b-a32b-eb6f13028d4b\") " pod="openshift-monitoring/metrics-server-f799f76d7-kg8mn" Dec 08 21:24:13 crc kubenswrapper[4791]: I1208 21:24:13.441986 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-metrics-client-certs\" (UniqueName: \"kubernetes.io/secret/8fb0db65-5bab-450b-a32b-eb6f13028d4b-secret-metrics-client-certs\") pod \"metrics-server-f799f76d7-kg8mn\" (UID: \"8fb0db65-5bab-450b-a32b-eb6f13028d4b\") " pod="openshift-monitoring/metrics-server-f799f76d7-kg8mn" Dec 08 21:24:13 crc kubenswrapper[4791]: I1208 21:24:13.442015 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-server-audit-profiles\" (UniqueName: \"kubernetes.io/configmap/8fb0db65-5bab-450b-a32b-eb6f13028d4b-metrics-server-audit-profiles\") pod \"metrics-server-f799f76d7-kg8mn\" (UID: \"8fb0db65-5bab-450b-a32b-eb6f13028d4b\") " pod="openshift-monitoring/metrics-server-f799f76d7-kg8mn" 
Dec 08 21:24:13 crc kubenswrapper[4791]: I1208 21:24:13.442049 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfl5z\" (UniqueName: \"kubernetes.io/projected/8fb0db65-5bab-450b-a32b-eb6f13028d4b-kube-api-access-lfl5z\") pod \"metrics-server-f799f76d7-kg8mn\" (UID: \"8fb0db65-5bab-450b-a32b-eb6f13028d4b\") " pod="openshift-monitoring/metrics-server-f799f76d7-kg8mn" Dec 08 21:24:13 crc kubenswrapper[4791]: I1208 21:24:13.442123 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-metrics-server-tls\" (UniqueName: \"kubernetes.io/secret/8fb0db65-5bab-450b-a32b-eb6f13028d4b-secret-metrics-server-tls\") pod \"metrics-server-f799f76d7-kg8mn\" (UID: \"8fb0db65-5bab-450b-a32b-eb6f13028d4b\") " pod="openshift-monitoring/metrics-server-f799f76d7-kg8mn" Dec 08 21:24:13 crc kubenswrapper[4791]: I1208 21:24:13.543345 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"configmap-kubelet-serving-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8fb0db65-5bab-450b-a32b-eb6f13028d4b-configmap-kubelet-serving-ca-bundle\") pod \"metrics-server-f799f76d7-kg8mn\" (UID: \"8fb0db65-5bab-450b-a32b-eb6f13028d4b\") " pod="openshift-monitoring/metrics-server-f799f76d7-kg8mn" Dec 08 21:24:13 crc kubenswrapper[4791]: I1208 21:24:13.543687 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-metrics-client-certs\" (UniqueName: \"kubernetes.io/secret/8fb0db65-5bab-450b-a32b-eb6f13028d4b-secret-metrics-client-certs\") pod \"metrics-server-f799f76d7-kg8mn\" (UID: \"8fb0db65-5bab-450b-a32b-eb6f13028d4b\") " pod="openshift-monitoring/metrics-server-f799f76d7-kg8mn" Dec 08 21:24:13 crc kubenswrapper[4791]: I1208 21:24:13.543733 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-server-audit-profiles\" (UniqueName: \"kubernetes.io/configmap/8fb0db65-5bab-450b-a32b-eb6f13028d4b-metrics-server-audit-profiles\") pod \"metrics-server-f799f76d7-kg8mn\" (UID: \"8fb0db65-5bab-450b-a32b-eb6f13028d4b\") " pod="openshift-monitoring/metrics-server-f799f76d7-kg8mn" Dec 08 21:24:13 crc kubenswrapper[4791]: I1208 21:24:13.543768 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfl5z\" (UniqueName: \"kubernetes.io/projected/8fb0db65-5bab-450b-a32b-eb6f13028d4b-kube-api-access-lfl5z\") pod \"metrics-server-f799f76d7-kg8mn\" (UID: \"8fb0db65-5bab-450b-a32b-eb6f13028d4b\") " pod="openshift-monitoring/metrics-server-f799f76d7-kg8mn" Dec 08 21:24:13 crc kubenswrapper[4791]: I1208 21:24:13.543800 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-metrics-server-tls\" (UniqueName: \"kubernetes.io/secret/8fb0db65-5bab-450b-a32b-eb6f13028d4b-secret-metrics-server-tls\") pod \"metrics-server-f799f76d7-kg8mn\" (UID: \"8fb0db65-5bab-450b-a32b-eb6f13028d4b\") " pod="openshift-monitoring/metrics-server-f799f76d7-kg8mn" Dec 08 21:24:13 crc kubenswrapper[4791]: I1208 21:24:13.543862 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fb0db65-5bab-450b-a32b-eb6f13028d4b-client-ca-bundle\") pod \"metrics-server-f799f76d7-kg8mn\" (UID: \"8fb0db65-5bab-450b-a32b-eb6f13028d4b\") " pod="openshift-monitoring/metrics-server-f799f76d7-kg8mn" Dec 08 21:24:13 crc kubenswrapper[4791]: I1208 21:24:13.543885 4791 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"audit-log\" (UniqueName: \"kubernetes.io/empty-dir/8fb0db65-5bab-450b-a32b-eb6f13028d4b-audit-log\") pod \"metrics-server-f799f76d7-kg8mn\" (UID: \"8fb0db65-5bab-450b-a32b-eb6f13028d4b\") " pod="openshift-monitoring/metrics-server-f799f76d7-kg8mn" Dec 08 21:24:13 crc kubenswrapper[4791]: I1208 21:24:13.544469 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-log\" (UniqueName: \"kubernetes.io/empty-dir/8fb0db65-5bab-450b-a32b-eb6f13028d4b-audit-log\") pod \"metrics-server-f799f76d7-kg8mn\" (UID: \"8fb0db65-5bab-450b-a32b-eb6f13028d4b\") " pod="openshift-monitoring/metrics-server-f799f76d7-kg8mn" Dec 08 21:24:13 crc kubenswrapper[4791]: I1208 21:24:13.544531 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"configmap-kubelet-serving-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8fb0db65-5bab-450b-a32b-eb6f13028d4b-configmap-kubelet-serving-ca-bundle\") pod \"metrics-server-f799f76d7-kg8mn\" (UID: \"8fb0db65-5bab-450b-a32b-eb6f13028d4b\") " pod="openshift-monitoring/metrics-server-f799f76d7-kg8mn" Dec 08 21:24:13 crc kubenswrapper[4791]: I1208 21:24:13.545416 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-server-audit-profiles\" (UniqueName: \"kubernetes.io/configmap/8fb0db65-5bab-450b-a32b-eb6f13028d4b-metrics-server-audit-profiles\") pod \"metrics-server-f799f76d7-kg8mn\" (UID: \"8fb0db65-5bab-450b-a32b-eb6f13028d4b\") " pod="openshift-monitoring/metrics-server-f799f76d7-kg8mn" Dec 08 21:24:13 crc kubenswrapper[4791]: I1208 21:24:13.548314 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-metrics-client-certs\" (UniqueName: \"kubernetes.io/secret/8fb0db65-5bab-450b-a32b-eb6f13028d4b-secret-metrics-client-certs\") pod \"metrics-server-f799f76d7-kg8mn\" (UID: \"8fb0db65-5bab-450b-a32b-eb6f13028d4b\") " pod="openshift-monitoring/metrics-server-f799f76d7-kg8mn" Dec 08 21:24:13 crc kubenswrapper[4791]: I1208 21:24:13.548598 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fb0db65-5bab-450b-a32b-eb6f13028d4b-client-ca-bundle\") pod \"metrics-server-f799f76d7-kg8mn\" (UID: \"8fb0db65-5bab-450b-a32b-eb6f13028d4b\") " pod="openshift-monitoring/metrics-server-f799f76d7-kg8mn" Dec 08 21:24:13 crc kubenswrapper[4791]: I1208 21:24:13.548675 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-metrics-server-tls\" (UniqueName: \"kubernetes.io/secret/8fb0db65-5bab-450b-a32b-eb6f13028d4b-secret-metrics-server-tls\") pod \"metrics-server-f799f76d7-kg8mn\" (UID: \"8fb0db65-5bab-450b-a32b-eb6f13028d4b\") " pod="openshift-monitoring/metrics-server-f799f76d7-kg8mn" Dec 08 21:24:13 crc kubenswrapper[4791]: I1208 21:24:13.567801 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfl5z\" (UniqueName: \"kubernetes.io/projected/8fb0db65-5bab-450b-a32b-eb6f13028d4b-kube-api-access-lfl5z\") pod \"metrics-server-f799f76d7-kg8mn\" (UID: \"8fb0db65-5bab-450b-a32b-eb6f13028d4b\") " pod="openshift-monitoring/metrics-server-f799f76d7-kg8mn" Dec 08 21:24:13 crc kubenswrapper[4791]: I1208 21:24:13.693076 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-monitoring/monitoring-plugin-7ccfb657b9-vjn6x"] Dec 08 21:24:13 crc kubenswrapper[4791]: I1208 21:24:13.694008 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-monitoring/monitoring-plugin-7ccfb657b9-vjn6x" Dec 08 21:24:13 crc kubenswrapper[4791]: I1208 21:24:13.695936 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"monitoring-plugin-cert" Dec 08 21:24:13 crc kubenswrapper[4791]: I1208 21:24:13.696483 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"default-dockercfg-6tstp" Dec 08 21:24:13 crc kubenswrapper[4791]: I1208 21:24:13.702583 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/monitoring-plugin-7ccfb657b9-vjn6x"] Dec 08 21:24:13 crc kubenswrapper[4791]: I1208 21:24:13.729773 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-monitoring/metrics-server-f799f76d7-kg8mn" Dec 08 21:24:13 crc kubenswrapper[4791]: I1208 21:24:13.746236 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"monitoring-plugin-cert\" (UniqueName: \"kubernetes.io/secret/80f9c089-797f-4188-ae69-5854a688edf7-monitoring-plugin-cert\") pod \"monitoring-plugin-7ccfb657b9-vjn6x\" (UID: \"80f9c089-797f-4188-ae69-5854a688edf7\") " pod="openshift-monitoring/monitoring-plugin-7ccfb657b9-vjn6x" Dec 08 21:24:13 crc kubenswrapper[4791]: I1208 21:24:13.847395 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"monitoring-plugin-cert\" (UniqueName: \"kubernetes.io/secret/80f9c089-797f-4188-ae69-5854a688edf7-monitoring-plugin-cert\") pod \"monitoring-plugin-7ccfb657b9-vjn6x\" (UID: \"80f9c089-797f-4188-ae69-5854a688edf7\") " pod="openshift-monitoring/monitoring-plugin-7ccfb657b9-vjn6x" Dec 08 21:24:13 crc kubenswrapper[4791]: I1208 21:24:13.851535 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"monitoring-plugin-cert\" (UniqueName: \"kubernetes.io/secret/80f9c089-797f-4188-ae69-5854a688edf7-monitoring-plugin-cert\") pod \"monitoring-plugin-7ccfb657b9-vjn6x\" (UID: \"80f9c089-797f-4188-ae69-5854a688edf7\") " pod="openshift-monitoring/monitoring-plugin-7ccfb657b9-vjn6x" Dec 08 21:24:14 crc kubenswrapper[4791]: I1208 21:24:14.014898 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-monitoring/monitoring-plugin-7ccfb657b9-vjn6x" Dec 08 21:24:14 crc kubenswrapper[4791]: I1208 21:24:14.989200 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-monitoring/prometheus-k8s-0"] Dec 08 21:24:14 crc kubenswrapper[4791]: I1208 21:24:14.994264 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.006791 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-k8s" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.007105 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-k8s-grpc-tls-dt3grt0ufhioc" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.007336 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-k8s-dockercfg-v5c2r" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.011181 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-k8s-thanos-prometheus-http-client-file" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.012746 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-monitoring"/"serving-certs-ca-bundle" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.012865 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-k8s-kube-rbac-proxy-web" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.012984 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-k8s-web-config" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.017140 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-k8s-thanos-sidecar-tls" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.017472 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"kube-rbac-proxy" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.017613 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-k8s-tls" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.017768 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-k8s-tls-assets-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.021214 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/prometheus-k8s-0"] Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.022944 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-monitoring"/"prometheus-k8s-rulefiles-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.024235 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-monitoring"/"prometheus-trusted-ca-bundle" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.149611 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-prometheus-k8s-kube-rbac-proxy-web\" (UniqueName: \"kubernetes.io/secret/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-secret-prometheus-k8s-kube-rbac-proxy-web\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.149679 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-prometheus-k8s-thanos-sidecar-tls\" (UniqueName: \"kubernetes.io/secret/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-secret-prometheus-k8s-thanos-sidecar-tls\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc 
kubenswrapper[4791]: I1208 21:24:15.149741 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-web-config\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.149778 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-tls-assets\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.149876 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6cpg5\" (UniqueName: \"kubernetes.io/projected/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-kube-api-access-6cpg5\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.149971 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-config\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.150005 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-kube-rbac-proxy\" (UniqueName: \"kubernetes.io/secret/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-secret-kube-rbac-proxy\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.150068 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-prometheus-trusted-ca-bundle\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.150091 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"configmap-serving-certs-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-configmap-serving-certs-ca-bundle\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.150115 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-metrics-client-certs\" (UniqueName: \"kubernetes.io/secret/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-secret-metrics-client-certs\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.150155 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"configmap-kubelet-serving-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-configmap-kubelet-serving-ca-bundle\") pod 
\"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.150229 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-grpc-tls\" (UniqueName: \"kubernetes.io/secret/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-secret-grpc-tls\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.150281 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-config-out\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.150336 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-k8s-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-prometheus-k8s-rulefiles-0\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.150373 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-thanos-prometheus-http-client-file\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.150392 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-prometheus-k8s-tls\" (UniqueName: \"kubernetes.io/secret/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-secret-prometheus-k8s-tls\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.150408 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-k8s-db\" (UniqueName: \"kubernetes.io/empty-dir/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-prometheus-k8s-db\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.150432 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"configmap-metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-configmap-metrics-client-ca\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.251505 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-prometheus-k8s-tls\" (UniqueName: \"kubernetes.io/secret/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-secret-prometheus-k8s-tls\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.251574 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-thanos-prometheus-http-client-file\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.251608 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-k8s-db\" (UniqueName: \"kubernetes.io/empty-dir/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-prometheus-k8s-db\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.251645 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"configmap-metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-configmap-metrics-client-ca\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.251673 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-prometheus-k8s-kube-rbac-proxy-web\" (UniqueName: \"kubernetes.io/secret/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-secret-prometheus-k8s-kube-rbac-proxy-web\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.251734 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-prometheus-k8s-thanos-sidecar-tls\" (UniqueName: \"kubernetes.io/secret/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-secret-prometheus-k8s-thanos-sidecar-tls\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.251767 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-web-config\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.251802 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-tls-assets\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.251827 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6cpg5\" (UniqueName: \"kubernetes.io/projected/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-kube-api-access-6cpg5\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.251856 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-config\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.251884 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"secret-kube-rbac-proxy\" (UniqueName: \"kubernetes.io/secret/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-secret-kube-rbac-proxy\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.251914 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-prometheus-trusted-ca-bundle\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.251940 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"configmap-serving-certs-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-configmap-serving-certs-ca-bundle\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.252533 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-metrics-client-certs\" (UniqueName: \"kubernetes.io/secret/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-secret-metrics-client-certs\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.252592 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"configmap-kubelet-serving-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-configmap-kubelet-serving-ca-bundle\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.252599 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-k8s-db\" (UniqueName: \"kubernetes.io/empty-dir/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-prometheus-k8s-db\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.252627 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-grpc-tls\" (UniqueName: \"kubernetes.io/secret/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-secret-grpc-tls\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.252731 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-config-out\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.252791 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-k8s-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-prometheus-k8s-rulefiles-0\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.252934 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"configmap-serving-certs-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-configmap-serving-certs-ca-bundle\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.256031 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-prometheus-trusted-ca-bundle\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.258001 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-grpc-tls\" (UniqueName: \"kubernetes.io/secret/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-secret-grpc-tls\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.258868 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"configmap-kubelet-serving-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-configmap-kubelet-serving-ca-bundle\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.259638 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"configmap-metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-configmap-metrics-client-ca\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.260573 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-config-out\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.260972 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-config\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.261969 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-thanos-prometheus-http-client-file\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.262025 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-tls-assets\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.264882 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-k8s-rulefiles-0\" (UniqueName: 
\"kubernetes.io/configmap/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-prometheus-k8s-rulefiles-0\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.265023 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-prometheus-k8s-thanos-sidecar-tls\" (UniqueName: \"kubernetes.io/secret/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-secret-prometheus-k8s-thanos-sidecar-tls\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.268605 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-prometheus-k8s-kube-rbac-proxy-web\" (UniqueName: \"kubernetes.io/secret/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-secret-prometheus-k8s-kube-rbac-proxy-web\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.275139 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-web-config\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.280936 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-kube-rbac-proxy\" (UniqueName: \"kubernetes.io/secret/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-secret-kube-rbac-proxy\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.284413 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-prometheus-k8s-tls\" (UniqueName: \"kubernetes.io/secret/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-secret-prometheus-k8s-tls\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.284524 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-metrics-client-certs\" (UniqueName: \"kubernetes.io/secret/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-secret-metrics-client-certs\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.286083 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/metrics-server-f799f76d7-kg8mn"] Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.299998 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-c8c89d49c-bjcpg"] Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.300307 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6cpg5\" (UniqueName: \"kubernetes.io/projected/b6dc8b94-5522-4ff1-88b4-70dc976b76e5-kube-api-access-6cpg5\") pod \"prometheus-k8s-0\" (UID: \"b6dc8b94-5522-4ff1-88b4-70dc976b76e5\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.360497 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/monitoring-plugin-7ccfb657b9-vjn6x"] Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.378864 4791 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.876031 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/prometheus-k8s-0"] Dec 08 21:24:15 crc kubenswrapper[4791]: W1208 21:24:15.885402 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb6dc8b94_5522_4ff1_88b4_70dc976b76e5.slice/crio-4f7021efda0b902150b4bec99fd43e995d47b0f11e01bf05eceed48ebd50f8d1 WatchSource:0}: Error finding container 4f7021efda0b902150b4bec99fd43e995d47b0f11e01bf05eceed48ebd50f8d1: Status 404 returned error can't find the container with id 4f7021efda0b902150b4bec99fd43e995d47b0f11e01bf05eceed48ebd50f8d1 Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.921351 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/metrics-server-f799f76d7-kg8mn" event={"ID":"8fb0db65-5bab-450b-a32b-eb6f13028d4b","Type":"ContainerStarted","Data":"2fd1ecd7bb42efb2b34e18f05c3cfd5f099a4dea7854fd98f166d2438dfbc7e8"} Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.923938 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-c8c89d49c-bjcpg" event={"ID":"3cde98bf-1fc3-4460-879b-26802e310157","Type":"ContainerStarted","Data":"17a10b1eb50fcf81abeca9cd61c8ffc2cd64370c87e29abd527fc041f960d2ba"} Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.924022 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-c8c89d49c-bjcpg" event={"ID":"3cde98bf-1fc3-4460-879b-26802e310157","Type":"ContainerStarted","Data":"069a6c5640736c6bc82fa8110200d708880b290a87333fb9918ffc8526dfd599"} Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.931836 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/thanos-querier-54b8d7cb66-qrzdx" event={"ID":"328971ba-ea1e-4447-8274-8d7dceec9f72","Type":"ContainerStarted","Data":"792406d50538194b40d495d7554aa8aaa45e1deed351eff404edff922206ff3c"} Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.931900 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/thanos-querier-54b8d7cb66-qrzdx" event={"ID":"328971ba-ea1e-4447-8274-8d7dceec9f72","Type":"ContainerStarted","Data":"3b5b06c9e2b902e5e372da3d59c5cb3d9ebd54008626b528d9f681c34f851172"} Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.931915 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/thanos-querier-54b8d7cb66-qrzdx" event={"ID":"328971ba-ea1e-4447-8274-8d7dceec9f72","Type":"ContainerStarted","Data":"ae14f4f8fa669b1a225c3b677c56b92465876ae2a7559f155eab0f46cc23d197"} Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.932734 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/monitoring-plugin-7ccfb657b9-vjn6x" event={"ID":"80f9c089-797f-4188-ae69-5854a688edf7","Type":"ContainerStarted","Data":"57ecc9a68522f44b05d3c10fda139146947d3730da87b1966eca32b1259ae96a"} Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.933845 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-k8s-0" event={"ID":"b6dc8b94-5522-4ff1-88b4-70dc976b76e5","Type":"ContainerStarted","Data":"4f7021efda0b902150b4bec99fd43e995d47b0f11e01bf05eceed48ebd50f8d1"} Dec 08 21:24:15 crc kubenswrapper[4791]: I1208 21:24:15.948617 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-console/console-c8c89d49c-bjcpg" podStartSLOduration=3.948575877 podStartE2EDuration="3.948575877s" podCreationTimestamp="2025-12-08 21:24:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:24:15.944208106 +0000 UTC m=+332.642966451" watchObservedRunningTime="2025-12-08 21:24:15.948575877 +0000 UTC m=+332.647334222" Dec 08 21:24:16 crc kubenswrapper[4791]: I1208 21:24:16.944980 4791 generic.go:334] "Generic (PLEG): container finished" podID="b6dc8b94-5522-4ff1-88b4-70dc976b76e5" containerID="0f7e4eca8369c77bc42f87d83f746f5345c15dc4a28c9574f04622920e9be8c5" exitCode=0 Dec 08 21:24:16 crc kubenswrapper[4791]: I1208 21:24:16.946655 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-k8s-0" event={"ID":"b6dc8b94-5522-4ff1-88b4-70dc976b76e5","Type":"ContainerDied","Data":"0f7e4eca8369c77bc42f87d83f746f5345c15dc4a28c9574f04622920e9be8c5"} Dec 08 21:24:20 crc kubenswrapper[4791]: I1208 21:24:20.061260 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/monitoring-plugin-7ccfb657b9-vjn6x" event={"ID":"80f9c089-797f-4188-ae69-5854a688edf7","Type":"ContainerStarted","Data":"46dd0977a8c65b38c3d6d9880cf487f3214193dc0ca1823cb3c8c670ce38d40f"} Dec 08 21:24:20 crc kubenswrapper[4791]: I1208 21:24:20.062034 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-monitoring/monitoring-plugin-7ccfb657b9-vjn6x" Dec 08 21:24:20 crc kubenswrapper[4791]: I1208 21:24:20.067138 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/metrics-server-f799f76d7-kg8mn" event={"ID":"8fb0db65-5bab-450b-a32b-eb6f13028d4b","Type":"ContainerStarted","Data":"8a27782003d40f78e71529f7a3f2f3bf96eb0fa506332c732197b55fa41bd719"} Dec 08 21:24:20 crc kubenswrapper[4791]: I1208 21:24:20.069310 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-monitoring/monitoring-plugin-7ccfb657b9-vjn6x" Dec 08 21:24:20 crc kubenswrapper[4791]: I1208 21:24:20.073561 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/alertmanager-main-0" event={"ID":"f8e215aa-7580-46b7-a6a0-37e3f139cde7","Type":"ContainerStarted","Data":"f1b659a642b32afc12fac4b648e3186f8d6e7a070eefbd914f1783987317ff87"} Dec 08 21:24:20 crc kubenswrapper[4791]: I1208 21:24:20.073595 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/alertmanager-main-0" event={"ID":"f8e215aa-7580-46b7-a6a0-37e3f139cde7","Type":"ContainerStarted","Data":"f3e6acb0c1c571dc67c538d3ee7a5fbfa713cff0cf6f07f67b014de0b54e632d"} Dec 08 21:24:20 crc kubenswrapper[4791]: I1208 21:24:20.073607 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/alertmanager-main-0" event={"ID":"f8e215aa-7580-46b7-a6a0-37e3f139cde7","Type":"ContainerStarted","Data":"cb439afaa0ddb82515bc9a83d2e43fbf832461697b618cc63a4d2e271dc3e1e8"} Dec 08 21:24:20 crc kubenswrapper[4791]: I1208 21:24:20.077319 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/thanos-querier-54b8d7cb66-qrzdx" event={"ID":"328971ba-ea1e-4447-8274-8d7dceec9f72","Type":"ContainerStarted","Data":"cf0636f9f43c474e6b2449616a4aa5421781284d6b6f6b3df0ad7aa3c1ff9c39"} Dec 08 21:24:20 crc kubenswrapper[4791]: I1208 21:24:20.077358 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/thanos-querier-54b8d7cb66-qrzdx" 
event={"ID":"328971ba-ea1e-4447-8274-8d7dceec9f72","Type":"ContainerStarted","Data":"6653f963cb70ba9a2ebe222b5b72a318f72df44761f44b432a6f9d851437a6a6"} Dec 08 21:24:20 crc kubenswrapper[4791]: I1208 21:24:20.320021 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-monitoring/monitoring-plugin-7ccfb657b9-vjn6x" podStartSLOduration=3.546839324 podStartE2EDuration="7.319994414s" podCreationTimestamp="2025-12-08 21:24:13 +0000 UTC" firstStartedPulling="2025-12-08 21:24:15.379501738 +0000 UTC m=+332.078260083" lastFinishedPulling="2025-12-08 21:24:19.152656828 +0000 UTC m=+335.851415173" observedRunningTime="2025-12-08 21:24:20.2883051 +0000 UTC m=+336.987063445" watchObservedRunningTime="2025-12-08 21:24:20.319994414 +0000 UTC m=+337.018752759" Dec 08 21:24:20 crc kubenswrapper[4791]: I1208 21:24:20.330324 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-monitoring/metrics-server-f799f76d7-kg8mn" podStartSLOduration=3.478570192 podStartE2EDuration="7.330302835s" podCreationTimestamp="2025-12-08 21:24:13 +0000 UTC" firstStartedPulling="2025-12-08 21:24:15.294891072 +0000 UTC m=+331.993649417" lastFinishedPulling="2025-12-08 21:24:19.146623715 +0000 UTC m=+335.845382060" observedRunningTime="2025-12-08 21:24:20.326010627 +0000 UTC m=+337.024768972" watchObservedRunningTime="2025-12-08 21:24:20.330302835 +0000 UTC m=+337.029061180" Dec 08 21:24:23 crc kubenswrapper[4791]: I1208 21:24:23.027155 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-c8c89d49c-bjcpg" Dec 08 21:24:23 crc kubenswrapper[4791]: I1208 21:24:23.027556 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-c8c89d49c-bjcpg" Dec 08 21:24:23 crc kubenswrapper[4791]: I1208 21:24:23.034031 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-c8c89d49c-bjcpg" Dec 08 21:24:23 crc kubenswrapper[4791]: I1208 21:24:23.130285 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-k8s-0" event={"ID":"b6dc8b94-5522-4ff1-88b4-70dc976b76e5","Type":"ContainerStarted","Data":"a18cd3aeea830cff74f040680096ada14576f75f231f1625d86a59d4c640b86b"} Dec 08 21:24:23 crc kubenswrapper[4791]: I1208 21:24:23.130349 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-k8s-0" event={"ID":"b6dc8b94-5522-4ff1-88b4-70dc976b76e5","Type":"ContainerStarted","Data":"63ed1dee185122773e8fd99089eb0aa534657fecda3ab827f3f7f3f660e378ed"} Dec 08 21:24:23 crc kubenswrapper[4791]: I1208 21:24:23.151352 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/alertmanager-main-0" event={"ID":"f8e215aa-7580-46b7-a6a0-37e3f139cde7","Type":"ContainerStarted","Data":"e21e6e09274d600773ed01aa0ab53cd11d510f4785c35df21ed86b74260c4b0b"} Dec 08 21:24:23 crc kubenswrapper[4791]: I1208 21:24:23.151416 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/alertmanager-main-0" event={"ID":"f8e215aa-7580-46b7-a6a0-37e3f139cde7","Type":"ContainerStarted","Data":"7619d874be048e8d3820c5e2d0010c55bec150bd4a322d4275f44191b9e97b22"} Dec 08 21:24:23 crc kubenswrapper[4791]: I1208 21:24:23.434696 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/thanos-querier-54b8d7cb66-qrzdx" 
event={"ID":"328971ba-ea1e-4447-8274-8d7dceec9f72","Type":"ContainerStarted","Data":"49a3b2cd227739822192ae501adb267a093fb2ae22eb0d40f9b4a809b3542dcb"} Dec 08 21:24:23 crc kubenswrapper[4791]: I1208 21:24:23.435192 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-monitoring/thanos-querier-54b8d7cb66-qrzdx" Dec 08 21:24:23 crc kubenswrapper[4791]: I1208 21:24:23.445072 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-c8c89d49c-bjcpg" Dec 08 21:24:23 crc kubenswrapper[4791]: I1208 21:24:23.456546 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-monitoring/thanos-querier-54b8d7cb66-qrzdx" Dec 08 21:24:23 crc kubenswrapper[4791]: I1208 21:24:23.483993 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-monitoring/thanos-querier-54b8d7cb66-qrzdx" podStartSLOduration=6.404103912 podStartE2EDuration="14.483970208s" podCreationTimestamp="2025-12-08 21:24:09 +0000 UTC" firstStartedPulling="2025-12-08 21:24:11.06797744 +0000 UTC m=+327.766735775" lastFinishedPulling="2025-12-08 21:24:19.147843726 +0000 UTC m=+335.846602071" observedRunningTime="2025-12-08 21:24:23.483503426 +0000 UTC m=+340.182261771" watchObservedRunningTime="2025-12-08 21:24:23.483970208 +0000 UTC m=+340.182728553" Dec 08 21:24:23 crc kubenswrapper[4791]: I1208 21:24:23.621223 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-vck6k"] Dec 08 21:24:24 crc kubenswrapper[4791]: I1208 21:24:24.449391 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-k8s-0" event={"ID":"b6dc8b94-5522-4ff1-88b4-70dc976b76e5","Type":"ContainerStarted","Data":"7d823c08d649a06d9e2573b40ff266b997d8ab8fbbf8c314963422f1d2b1dc26"} Dec 08 21:24:24 crc kubenswrapper[4791]: I1208 21:24:24.449695 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-k8s-0" event={"ID":"b6dc8b94-5522-4ff1-88b4-70dc976b76e5","Type":"ContainerStarted","Data":"b992501afe841978d9509c30266e57f73503d8db5b435e3f7c722276236add73"} Dec 08 21:24:24 crc kubenswrapper[4791]: I1208 21:24:24.449734 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-k8s-0" event={"ID":"b6dc8b94-5522-4ff1-88b4-70dc976b76e5","Type":"ContainerStarted","Data":"82461ef653a2f6feb0bc05da1d21840c96e2c633988f523e91b0543b66487e02"} Dec 08 21:24:24 crc kubenswrapper[4791]: I1208 21:24:24.449744 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-k8s-0" event={"ID":"b6dc8b94-5522-4ff1-88b4-70dc976b76e5","Type":"ContainerStarted","Data":"49e23cb091150e45f1b0cb6f7ae5e8e51c90e557ea23d0e0a26e9bd31ba276d4"} Dec 08 21:24:24 crc kubenswrapper[4791]: I1208 21:24:24.454271 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/alertmanager-main-0" event={"ID":"f8e215aa-7580-46b7-a6a0-37e3f139cde7","Type":"ContainerStarted","Data":"1a02e914f6ce3b3aba5b1c059361f04447b5fe3598843579db21e42116d776ec"} Dec 08 21:24:24 crc kubenswrapper[4791]: I1208 21:24:24.481614 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-monitoring/prometheus-k8s-0" podStartSLOduration=4.843986115 podStartE2EDuration="10.481590818s" podCreationTimestamp="2025-12-08 21:24:14 +0000 UTC" firstStartedPulling="2025-12-08 21:24:16.947250414 +0000 UTC m=+333.646008759" lastFinishedPulling="2025-12-08 21:24:22.584855117 +0000 UTC 
m=+339.283613462" observedRunningTime="2025-12-08 21:24:24.477847903 +0000 UTC m=+341.176606268" watchObservedRunningTime="2025-12-08 21:24:24.481590818 +0000 UTC m=+341.180349173" Dec 08 21:24:24 crc kubenswrapper[4791]: I1208 21:24:24.522246 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-monitoring/alertmanager-main-0" podStartSLOduration=7.465607961 podStartE2EDuration="16.522225439s" podCreationTimestamp="2025-12-08 21:24:08 +0000 UTC" firstStartedPulling="2025-12-08 21:24:10.09131785 +0000 UTC m=+326.790076195" lastFinishedPulling="2025-12-08 21:24:19.147935328 +0000 UTC m=+335.846693673" observedRunningTime="2025-12-08 21:24:24.513761875 +0000 UTC m=+341.212520230" watchObservedRunningTime="2025-12-08 21:24:24.522225439 +0000 UTC m=+341.220983784" Dec 08 21:24:25 crc kubenswrapper[4791]: I1208 21:24:25.380629 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:24:33 crc kubenswrapper[4791]: I1208 21:24:33.730692 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-monitoring/metrics-server-f799f76d7-kg8mn" Dec 08 21:24:33 crc kubenswrapper[4791]: I1208 21:24:33.731212 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-monitoring/metrics-server-f799f76d7-kg8mn" Dec 08 21:24:35 crc kubenswrapper[4791]: I1208 21:24:35.251548 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:24:35 crc kubenswrapper[4791]: I1208 21:24:35.251624 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:24:48 crc kubenswrapper[4791]: I1208 21:24:48.681015 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-vck6k" podUID="959a8eb5-7d1a-4e10-bfc4-c23a1223d38d" containerName="console" containerID="cri-o://c8b446ed2b6ade0eca7b307baa94aa89649601ec5d3fe63477a0276f8f3e13db" gracePeriod=15 Dec 08 21:24:49 crc kubenswrapper[4791]: I1208 21:24:49.616676 4791 patch_prober.go:28] interesting pod/console-f9d7485db-vck6k container/console namespace/openshift-console: Readiness probe status=failure output="Get \"https://10.217.0.13:8443/health\": dial tcp 10.217.0.13:8443: connect: connection refused" start-of-body= Dec 08 21:24:49 crc kubenswrapper[4791]: I1208 21:24:49.617254 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/console-f9d7485db-vck6k" podUID="959a8eb5-7d1a-4e10-bfc4-c23a1223d38d" containerName="console" probeResult="failure" output="Get \"https://10.217.0.13:8443/health\": dial tcp 10.217.0.13:8443: connect: connection refused" Dec 08 21:24:50 crc kubenswrapper[4791]: I1208 21:24:50.660745 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-vck6k_959a8eb5-7d1a-4e10-bfc4-c23a1223d38d/console/0.log" Dec 08 21:24:50 crc kubenswrapper[4791]: I1208 21:24:50.660792 4791 generic.go:334] "Generic (PLEG): container finished" podID="959a8eb5-7d1a-4e10-bfc4-c23a1223d38d" 
containerID="c8b446ed2b6ade0eca7b307baa94aa89649601ec5d3fe63477a0276f8f3e13db" exitCode=2 Dec 08 21:24:50 crc kubenswrapper[4791]: I1208 21:24:50.660855 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-vck6k" event={"ID":"959a8eb5-7d1a-4e10-bfc4-c23a1223d38d","Type":"ContainerDied","Data":"c8b446ed2b6ade0eca7b307baa94aa89649601ec5d3fe63477a0276f8f3e13db"} Dec 08 21:24:51 crc kubenswrapper[4791]: I1208 21:24:51.191154 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-vck6k_959a8eb5-7d1a-4e10-bfc4-c23a1223d38d/console/0.log" Dec 08 21:24:51 crc kubenswrapper[4791]: I1208 21:24:51.191531 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-vck6k" Dec 08 21:24:51 crc kubenswrapper[4791]: I1208 21:24:51.208775 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/959a8eb5-7d1a-4e10-bfc4-c23a1223d38d-service-ca\") pod \"959a8eb5-7d1a-4e10-bfc4-c23a1223d38d\" (UID: \"959a8eb5-7d1a-4e10-bfc4-c23a1223d38d\") " Dec 08 21:24:51 crc kubenswrapper[4791]: I1208 21:24:51.208818 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/959a8eb5-7d1a-4e10-bfc4-c23a1223d38d-trusted-ca-bundle\") pod \"959a8eb5-7d1a-4e10-bfc4-c23a1223d38d\" (UID: \"959a8eb5-7d1a-4e10-bfc4-c23a1223d38d\") " Dec 08 21:24:51 crc kubenswrapper[4791]: I1208 21:24:51.208866 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/959a8eb5-7d1a-4e10-bfc4-c23a1223d38d-console-serving-cert\") pod \"959a8eb5-7d1a-4e10-bfc4-c23a1223d38d\" (UID: \"959a8eb5-7d1a-4e10-bfc4-c23a1223d38d\") " Dec 08 21:24:51 crc kubenswrapper[4791]: I1208 21:24:51.208915 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/959a8eb5-7d1a-4e10-bfc4-c23a1223d38d-oauth-serving-cert\") pod \"959a8eb5-7d1a-4e10-bfc4-c23a1223d38d\" (UID: \"959a8eb5-7d1a-4e10-bfc4-c23a1223d38d\") " Dec 08 21:24:51 crc kubenswrapper[4791]: I1208 21:24:51.208962 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/959a8eb5-7d1a-4e10-bfc4-c23a1223d38d-console-oauth-config\") pod \"959a8eb5-7d1a-4e10-bfc4-c23a1223d38d\" (UID: \"959a8eb5-7d1a-4e10-bfc4-c23a1223d38d\") " Dec 08 21:24:51 crc kubenswrapper[4791]: I1208 21:24:51.208997 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hdtm9\" (UniqueName: \"kubernetes.io/projected/959a8eb5-7d1a-4e10-bfc4-c23a1223d38d-kube-api-access-hdtm9\") pod \"959a8eb5-7d1a-4e10-bfc4-c23a1223d38d\" (UID: \"959a8eb5-7d1a-4e10-bfc4-c23a1223d38d\") " Dec 08 21:24:51 crc kubenswrapper[4791]: I1208 21:24:51.209025 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/959a8eb5-7d1a-4e10-bfc4-c23a1223d38d-console-config\") pod \"959a8eb5-7d1a-4e10-bfc4-c23a1223d38d\" (UID: \"959a8eb5-7d1a-4e10-bfc4-c23a1223d38d\") " Dec 08 21:24:51 crc kubenswrapper[4791]: I1208 21:24:51.210145 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/959a8eb5-7d1a-4e10-bfc4-c23a1223d38d-console-config" 
(OuterVolumeSpecName: "console-config") pod "959a8eb5-7d1a-4e10-bfc4-c23a1223d38d" (UID: "959a8eb5-7d1a-4e10-bfc4-c23a1223d38d"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:24:51 crc kubenswrapper[4791]: I1208 21:24:51.210851 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/959a8eb5-7d1a-4e10-bfc4-c23a1223d38d-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "959a8eb5-7d1a-4e10-bfc4-c23a1223d38d" (UID: "959a8eb5-7d1a-4e10-bfc4-c23a1223d38d"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:24:51 crc kubenswrapper[4791]: I1208 21:24:51.211005 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/959a8eb5-7d1a-4e10-bfc4-c23a1223d38d-service-ca" (OuterVolumeSpecName: "service-ca") pod "959a8eb5-7d1a-4e10-bfc4-c23a1223d38d" (UID: "959a8eb5-7d1a-4e10-bfc4-c23a1223d38d"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:24:51 crc kubenswrapper[4791]: I1208 21:24:51.211925 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/959a8eb5-7d1a-4e10-bfc4-c23a1223d38d-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "959a8eb5-7d1a-4e10-bfc4-c23a1223d38d" (UID: "959a8eb5-7d1a-4e10-bfc4-c23a1223d38d"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:24:51 crc kubenswrapper[4791]: I1208 21:24:51.217123 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/959a8eb5-7d1a-4e10-bfc4-c23a1223d38d-kube-api-access-hdtm9" (OuterVolumeSpecName: "kube-api-access-hdtm9") pod "959a8eb5-7d1a-4e10-bfc4-c23a1223d38d" (UID: "959a8eb5-7d1a-4e10-bfc4-c23a1223d38d"). InnerVolumeSpecName "kube-api-access-hdtm9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:24:51 crc kubenswrapper[4791]: I1208 21:24:51.217481 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/959a8eb5-7d1a-4e10-bfc4-c23a1223d38d-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "959a8eb5-7d1a-4e10-bfc4-c23a1223d38d" (UID: "959a8eb5-7d1a-4e10-bfc4-c23a1223d38d"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:24:51 crc kubenswrapper[4791]: I1208 21:24:51.223394 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/959a8eb5-7d1a-4e10-bfc4-c23a1223d38d-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "959a8eb5-7d1a-4e10-bfc4-c23a1223d38d" (UID: "959a8eb5-7d1a-4e10-bfc4-c23a1223d38d"). InnerVolumeSpecName "console-oauth-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:24:51 crc kubenswrapper[4791]: I1208 21:24:51.310435 4791 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/959a8eb5-7d1a-4e10-bfc4-c23a1223d38d-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:24:51 crc kubenswrapper[4791]: I1208 21:24:51.310470 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hdtm9\" (UniqueName: \"kubernetes.io/projected/959a8eb5-7d1a-4e10-bfc4-c23a1223d38d-kube-api-access-hdtm9\") on node \"crc\" DevicePath \"\"" Dec 08 21:24:51 crc kubenswrapper[4791]: I1208 21:24:51.310480 4791 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/959a8eb5-7d1a-4e10-bfc4-c23a1223d38d-console-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:24:51 crc kubenswrapper[4791]: I1208 21:24:51.310489 4791 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/959a8eb5-7d1a-4e10-bfc4-c23a1223d38d-service-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:24:51 crc kubenswrapper[4791]: I1208 21:24:51.310498 4791 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/959a8eb5-7d1a-4e10-bfc4-c23a1223d38d-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:24:51 crc kubenswrapper[4791]: I1208 21:24:51.310508 4791 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/959a8eb5-7d1a-4e10-bfc4-c23a1223d38d-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:24:51 crc kubenswrapper[4791]: I1208 21:24:51.310516 4791 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/959a8eb5-7d1a-4e10-bfc4-c23a1223d38d-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:24:51 crc kubenswrapper[4791]: I1208 21:24:51.668337 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-vck6k_959a8eb5-7d1a-4e10-bfc4-c23a1223d38d/console/0.log" Dec 08 21:24:51 crc kubenswrapper[4791]: I1208 21:24:51.668412 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-vck6k" event={"ID":"959a8eb5-7d1a-4e10-bfc4-c23a1223d38d","Type":"ContainerDied","Data":"b9d58ec7616f582daeecfebd87069bbd98c4bad9f8a360421e0ca724d933955e"} Dec 08 21:24:51 crc kubenswrapper[4791]: I1208 21:24:51.668457 4791 scope.go:117] "RemoveContainer" containerID="c8b446ed2b6ade0eca7b307baa94aa89649601ec5d3fe63477a0276f8f3e13db" Dec 08 21:24:51 crc kubenswrapper[4791]: I1208 21:24:51.669155 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-vck6k" Dec 08 21:24:51 crc kubenswrapper[4791]: I1208 21:24:51.691324 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-vck6k"] Dec 08 21:24:51 crc kubenswrapper[4791]: I1208 21:24:51.697167 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-vck6k"] Dec 08 21:24:53 crc kubenswrapper[4791]: I1208 21:24:53.610353 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="959a8eb5-7d1a-4e10-bfc4-c23a1223d38d" path="/var/lib/kubelet/pods/959a8eb5-7d1a-4e10-bfc4-c23a1223d38d/volumes" Dec 08 21:24:53 crc kubenswrapper[4791]: I1208 21:24:53.737265 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-monitoring/metrics-server-f799f76d7-kg8mn" Dec 08 21:24:53 crc kubenswrapper[4791]: I1208 21:24:53.743520 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-monitoring/metrics-server-f799f76d7-kg8mn" Dec 08 21:25:05 crc kubenswrapper[4791]: I1208 21:25:05.251216 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:25:05 crc kubenswrapper[4791]: I1208 21:25:05.251741 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:25:15 crc kubenswrapper[4791]: I1208 21:25:15.379916 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:25:15 crc kubenswrapper[4791]: I1208 21:25:15.409870 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:25:15 crc kubenswrapper[4791]: I1208 21:25:15.905483 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-monitoring/prometheus-k8s-0" Dec 08 21:25:29 crc kubenswrapper[4791]: I1208 21:25:29.980903 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-86c75cd9c-lmfh7"] Dec 08 21:25:29 crc kubenswrapper[4791]: E1208 21:25:29.987226 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="959a8eb5-7d1a-4e10-bfc4-c23a1223d38d" containerName="console" Dec 08 21:25:29 crc kubenswrapper[4791]: I1208 21:25:29.987439 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="959a8eb5-7d1a-4e10-bfc4-c23a1223d38d" containerName="console" Dec 08 21:25:29 crc kubenswrapper[4791]: I1208 21:25:29.987805 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="959a8eb5-7d1a-4e10-bfc4-c23a1223d38d" containerName="console" Dec 08 21:25:29 crc kubenswrapper[4791]: I1208 21:25:29.988846 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-86c75cd9c-lmfh7" Dec 08 21:25:29 crc kubenswrapper[4791]: I1208 21:25:29.991341 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-86c75cd9c-lmfh7"] Dec 08 21:25:30 crc kubenswrapper[4791]: I1208 21:25:30.073274 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa-console-oauth-config\") pod \"console-86c75cd9c-lmfh7\" (UID: \"23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa\") " pod="openshift-console/console-86c75cd9c-lmfh7" Dec 08 21:25:30 crc kubenswrapper[4791]: I1208 21:25:30.073347 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lm45b\" (UniqueName: \"kubernetes.io/projected/23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa-kube-api-access-lm45b\") pod \"console-86c75cd9c-lmfh7\" (UID: \"23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa\") " pod="openshift-console/console-86c75cd9c-lmfh7" Dec 08 21:25:30 crc kubenswrapper[4791]: I1208 21:25:30.073369 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa-oauth-serving-cert\") pod \"console-86c75cd9c-lmfh7\" (UID: \"23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa\") " pod="openshift-console/console-86c75cd9c-lmfh7" Dec 08 21:25:30 crc kubenswrapper[4791]: I1208 21:25:30.073391 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa-trusted-ca-bundle\") pod \"console-86c75cd9c-lmfh7\" (UID: \"23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa\") " pod="openshift-console/console-86c75cd9c-lmfh7" Dec 08 21:25:30 crc kubenswrapper[4791]: I1208 21:25:30.073416 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa-console-serving-cert\") pod \"console-86c75cd9c-lmfh7\" (UID: \"23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa\") " pod="openshift-console/console-86c75cd9c-lmfh7" Dec 08 21:25:30 crc kubenswrapper[4791]: I1208 21:25:30.073657 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa-console-config\") pod \"console-86c75cd9c-lmfh7\" (UID: \"23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa\") " pod="openshift-console/console-86c75cd9c-lmfh7" Dec 08 21:25:30 crc kubenswrapper[4791]: I1208 21:25:30.073820 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa-service-ca\") pod \"console-86c75cd9c-lmfh7\" (UID: \"23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa\") " pod="openshift-console/console-86c75cd9c-lmfh7" Dec 08 21:25:30 crc kubenswrapper[4791]: I1208 21:25:30.175533 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa-console-oauth-config\") pod \"console-86c75cd9c-lmfh7\" (UID: \"23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa\") " pod="openshift-console/console-86c75cd9c-lmfh7" Dec 08 21:25:30 crc 
kubenswrapper[4791]: I1208 21:25:30.175634 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lm45b\" (UniqueName: \"kubernetes.io/projected/23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa-kube-api-access-lm45b\") pod \"console-86c75cd9c-lmfh7\" (UID: \"23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa\") " pod="openshift-console/console-86c75cd9c-lmfh7" Dec 08 21:25:30 crc kubenswrapper[4791]: I1208 21:25:30.175668 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa-oauth-serving-cert\") pod \"console-86c75cd9c-lmfh7\" (UID: \"23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa\") " pod="openshift-console/console-86c75cd9c-lmfh7" Dec 08 21:25:30 crc kubenswrapper[4791]: I1208 21:25:30.175693 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa-trusted-ca-bundle\") pod \"console-86c75cd9c-lmfh7\" (UID: \"23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa\") " pod="openshift-console/console-86c75cd9c-lmfh7" Dec 08 21:25:30 crc kubenswrapper[4791]: I1208 21:25:30.175743 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa-console-serving-cert\") pod \"console-86c75cd9c-lmfh7\" (UID: \"23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa\") " pod="openshift-console/console-86c75cd9c-lmfh7" Dec 08 21:25:30 crc kubenswrapper[4791]: I1208 21:25:30.175809 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa-console-config\") pod \"console-86c75cd9c-lmfh7\" (UID: \"23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa\") " pod="openshift-console/console-86c75cd9c-lmfh7" Dec 08 21:25:30 crc kubenswrapper[4791]: I1208 21:25:30.175849 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa-service-ca\") pod \"console-86c75cd9c-lmfh7\" (UID: \"23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa\") " pod="openshift-console/console-86c75cd9c-lmfh7" Dec 08 21:25:30 crc kubenswrapper[4791]: I1208 21:25:30.176992 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa-oauth-serving-cert\") pod \"console-86c75cd9c-lmfh7\" (UID: \"23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa\") " pod="openshift-console/console-86c75cd9c-lmfh7" Dec 08 21:25:30 crc kubenswrapper[4791]: I1208 21:25:30.177068 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa-console-config\") pod \"console-86c75cd9c-lmfh7\" (UID: \"23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa\") " pod="openshift-console/console-86c75cd9c-lmfh7" Dec 08 21:25:30 crc kubenswrapper[4791]: I1208 21:25:30.177298 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa-service-ca\") pod \"console-86c75cd9c-lmfh7\" (UID: \"23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa\") " pod="openshift-console/console-86c75cd9c-lmfh7" Dec 08 21:25:30 crc kubenswrapper[4791]: I1208 21:25:30.177532 4791 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa-trusted-ca-bundle\") pod \"console-86c75cd9c-lmfh7\" (UID: \"23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa\") " pod="openshift-console/console-86c75cd9c-lmfh7" Dec 08 21:25:30 crc kubenswrapper[4791]: I1208 21:25:30.185467 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa-console-oauth-config\") pod \"console-86c75cd9c-lmfh7\" (UID: \"23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa\") " pod="openshift-console/console-86c75cd9c-lmfh7" Dec 08 21:25:30 crc kubenswrapper[4791]: I1208 21:25:30.193640 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lm45b\" (UniqueName: \"kubernetes.io/projected/23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa-kube-api-access-lm45b\") pod \"console-86c75cd9c-lmfh7\" (UID: \"23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa\") " pod="openshift-console/console-86c75cd9c-lmfh7" Dec 08 21:25:30 crc kubenswrapper[4791]: I1208 21:25:30.195018 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa-console-serving-cert\") pod \"console-86c75cd9c-lmfh7\" (UID: \"23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa\") " pod="openshift-console/console-86c75cd9c-lmfh7" Dec 08 21:25:30 crc kubenswrapper[4791]: I1208 21:25:30.314906 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-86c75cd9c-lmfh7" Dec 08 21:25:30 crc kubenswrapper[4791]: I1208 21:25:30.562929 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-86c75cd9c-lmfh7"] Dec 08 21:25:30 crc kubenswrapper[4791]: I1208 21:25:30.980617 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-86c75cd9c-lmfh7" event={"ID":"23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa","Type":"ContainerStarted","Data":"4d49db4497b83f933e9732726dfd3d07802a73f63701c87b950a6184cfc56fec"} Dec 08 21:25:30 crc kubenswrapper[4791]: I1208 21:25:30.981024 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-86c75cd9c-lmfh7" event={"ID":"23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa","Type":"ContainerStarted","Data":"71991288d6da38dc9fdcce84a4b146a3683a49436c31ababde6b949cd0c69215"} Dec 08 21:25:31 crc kubenswrapper[4791]: I1208 21:25:31.004181 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-86c75cd9c-lmfh7" podStartSLOduration=2.004160294 podStartE2EDuration="2.004160294s" podCreationTimestamp="2025-12-08 21:25:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:25:30.997582648 +0000 UTC m=+407.696341003" watchObservedRunningTime="2025-12-08 21:25:31.004160294 +0000 UTC m=+407.702918639" Dec 08 21:25:35 crc kubenswrapper[4791]: I1208 21:25:35.251431 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:25:35 crc kubenswrapper[4791]: I1208 21:25:35.252084 4791 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:25:35 crc kubenswrapper[4791]: I1208 21:25:35.252334 4791 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" Dec 08 21:25:35 crc kubenswrapper[4791]: I1208 21:25:35.253029 4791 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f02e0448eb4a9f07a94c3f82c8906b8b6abe63c2035df6075b84928e6646429d"} pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 08 21:25:35 crc kubenswrapper[4791]: I1208 21:25:35.253104 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" containerID="cri-o://f02e0448eb4a9f07a94c3f82c8906b8b6abe63c2035df6075b84928e6646429d" gracePeriod=600 Dec 08 21:25:36 crc kubenswrapper[4791]: I1208 21:25:36.018180 4791 generic.go:334] "Generic (PLEG): container finished" podID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerID="f02e0448eb4a9f07a94c3f82c8906b8b6abe63c2035df6075b84928e6646429d" exitCode=0 Dec 08 21:25:36 crc kubenswrapper[4791]: I1208 21:25:36.018308 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" event={"ID":"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d","Type":"ContainerDied","Data":"f02e0448eb4a9f07a94c3f82c8906b8b6abe63c2035df6075b84928e6646429d"} Dec 08 21:25:36 crc kubenswrapper[4791]: I1208 21:25:36.018986 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" event={"ID":"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d","Type":"ContainerStarted","Data":"839f3fac6b8b61fb2e046952212eab2b1c2a06973dd6faea1a1740a19a5794d2"} Dec 08 21:25:36 crc kubenswrapper[4791]: I1208 21:25:36.019016 4791 scope.go:117] "RemoveContainer" containerID="7c1cbce001d702f40825e1e2c6c77ce54111ab2097acbc1cc3d365fd3d7b06cc" Dec 08 21:25:40 crc kubenswrapper[4791]: I1208 21:25:40.315844 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-86c75cd9c-lmfh7" Dec 08 21:25:40 crc kubenswrapper[4791]: I1208 21:25:40.316493 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-86c75cd9c-lmfh7" Dec 08 21:25:40 crc kubenswrapper[4791]: I1208 21:25:40.321871 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-86c75cd9c-lmfh7" Dec 08 21:25:41 crc kubenswrapper[4791]: I1208 21:25:41.057338 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-86c75cd9c-lmfh7" Dec 08 21:25:41 crc kubenswrapper[4791]: I1208 21:25:41.129540 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-c8c89d49c-bjcpg"] Dec 08 21:26:06 crc kubenswrapper[4791]: I1208 21:26:06.212081 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-c8c89d49c-bjcpg" podUID="3cde98bf-1fc3-4460-879b-26802e310157" 
containerName="console" containerID="cri-o://17a10b1eb50fcf81abeca9cd61c8ffc2cd64370c87e29abd527fc041f960d2ba" gracePeriod=15 Dec 08 21:26:06 crc kubenswrapper[4791]: I1208 21:26:06.539686 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-c8c89d49c-bjcpg_3cde98bf-1fc3-4460-879b-26802e310157/console/0.log" Dec 08 21:26:06 crc kubenswrapper[4791]: I1208 21:26:06.540063 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-c8c89d49c-bjcpg" Dec 08 21:26:06 crc kubenswrapper[4791]: I1208 21:26:06.651219 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3cde98bf-1fc3-4460-879b-26802e310157-trusted-ca-bundle\") pod \"3cde98bf-1fc3-4460-879b-26802e310157\" (UID: \"3cde98bf-1fc3-4460-879b-26802e310157\") " Dec 08 21:26:06 crc kubenswrapper[4791]: I1208 21:26:06.651287 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3cde98bf-1fc3-4460-879b-26802e310157-console-serving-cert\") pod \"3cde98bf-1fc3-4460-879b-26802e310157\" (UID: \"3cde98bf-1fc3-4460-879b-26802e310157\") " Dec 08 21:26:06 crc kubenswrapper[4791]: I1208 21:26:06.651316 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3cde98bf-1fc3-4460-879b-26802e310157-oauth-serving-cert\") pod \"3cde98bf-1fc3-4460-879b-26802e310157\" (UID: \"3cde98bf-1fc3-4460-879b-26802e310157\") " Dec 08 21:26:06 crc kubenswrapper[4791]: I1208 21:26:06.651406 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3cde98bf-1fc3-4460-879b-26802e310157-service-ca\") pod \"3cde98bf-1fc3-4460-879b-26802e310157\" (UID: \"3cde98bf-1fc3-4460-879b-26802e310157\") " Dec 08 21:26:06 crc kubenswrapper[4791]: I1208 21:26:06.651433 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3cde98bf-1fc3-4460-879b-26802e310157-console-config\") pod \"3cde98bf-1fc3-4460-879b-26802e310157\" (UID: \"3cde98bf-1fc3-4460-879b-26802e310157\") " Dec 08 21:26:06 crc kubenswrapper[4791]: I1208 21:26:06.651499 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6mtbf\" (UniqueName: \"kubernetes.io/projected/3cde98bf-1fc3-4460-879b-26802e310157-kube-api-access-6mtbf\") pod \"3cde98bf-1fc3-4460-879b-26802e310157\" (UID: \"3cde98bf-1fc3-4460-879b-26802e310157\") " Dec 08 21:26:06 crc kubenswrapper[4791]: I1208 21:26:06.651555 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3cde98bf-1fc3-4460-879b-26802e310157-console-oauth-config\") pod \"3cde98bf-1fc3-4460-879b-26802e310157\" (UID: \"3cde98bf-1fc3-4460-879b-26802e310157\") " Dec 08 21:26:06 crc kubenswrapper[4791]: I1208 21:26:06.652672 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cde98bf-1fc3-4460-879b-26802e310157-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "3cde98bf-1fc3-4460-879b-26802e310157" (UID: "3cde98bf-1fc3-4460-879b-26802e310157"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:26:06 crc kubenswrapper[4791]: I1208 21:26:06.652694 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cde98bf-1fc3-4460-879b-26802e310157-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "3cde98bf-1fc3-4460-879b-26802e310157" (UID: "3cde98bf-1fc3-4460-879b-26802e310157"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:26:06 crc kubenswrapper[4791]: I1208 21:26:06.652887 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cde98bf-1fc3-4460-879b-26802e310157-console-config" (OuterVolumeSpecName: "console-config") pod "3cde98bf-1fc3-4460-879b-26802e310157" (UID: "3cde98bf-1fc3-4460-879b-26802e310157"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:26:06 crc kubenswrapper[4791]: I1208 21:26:06.653450 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cde98bf-1fc3-4460-879b-26802e310157-service-ca" (OuterVolumeSpecName: "service-ca") pod "3cde98bf-1fc3-4460-879b-26802e310157" (UID: "3cde98bf-1fc3-4460-879b-26802e310157"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:26:06 crc kubenswrapper[4791]: I1208 21:26:06.658483 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3cde98bf-1fc3-4460-879b-26802e310157-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "3cde98bf-1fc3-4460-879b-26802e310157" (UID: "3cde98bf-1fc3-4460-879b-26802e310157"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:26:06 crc kubenswrapper[4791]: I1208 21:26:06.663517 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3cde98bf-1fc3-4460-879b-26802e310157-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "3cde98bf-1fc3-4460-879b-26802e310157" (UID: "3cde98bf-1fc3-4460-879b-26802e310157"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:26:06 crc kubenswrapper[4791]: I1208 21:26:06.664979 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cde98bf-1fc3-4460-879b-26802e310157-kube-api-access-6mtbf" (OuterVolumeSpecName: "kube-api-access-6mtbf") pod "3cde98bf-1fc3-4460-879b-26802e310157" (UID: "3cde98bf-1fc3-4460-879b-26802e310157"). InnerVolumeSpecName "kube-api-access-6mtbf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:26:06 crc kubenswrapper[4791]: I1208 21:26:06.754078 4791 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3cde98bf-1fc3-4460-879b-26802e310157-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:26:06 crc kubenswrapper[4791]: I1208 21:26:06.754380 4791 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3cde98bf-1fc3-4460-879b-26802e310157-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:26:06 crc kubenswrapper[4791]: I1208 21:26:06.754436 4791 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3cde98bf-1fc3-4460-879b-26802e310157-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:26:06 crc kubenswrapper[4791]: I1208 21:26:06.754485 4791 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3cde98bf-1fc3-4460-879b-26802e310157-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:26:06 crc kubenswrapper[4791]: I1208 21:26:06.754541 4791 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3cde98bf-1fc3-4460-879b-26802e310157-service-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:26:06 crc kubenswrapper[4791]: I1208 21:26:06.754597 4791 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3cde98bf-1fc3-4460-879b-26802e310157-console-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:26:06 crc kubenswrapper[4791]: I1208 21:26:06.754650 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6mtbf\" (UniqueName: \"kubernetes.io/projected/3cde98bf-1fc3-4460-879b-26802e310157-kube-api-access-6mtbf\") on node \"crc\" DevicePath \"\"" Dec 08 21:26:07 crc kubenswrapper[4791]: I1208 21:26:07.230666 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-c8c89d49c-bjcpg_3cde98bf-1fc3-4460-879b-26802e310157/console/0.log" Dec 08 21:26:07 crc kubenswrapper[4791]: I1208 21:26:07.230799 4791 generic.go:334] "Generic (PLEG): container finished" podID="3cde98bf-1fc3-4460-879b-26802e310157" containerID="17a10b1eb50fcf81abeca9cd61c8ffc2cd64370c87e29abd527fc041f960d2ba" exitCode=2 Dec 08 21:26:07 crc kubenswrapper[4791]: I1208 21:26:07.230852 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-c8c89d49c-bjcpg" event={"ID":"3cde98bf-1fc3-4460-879b-26802e310157","Type":"ContainerDied","Data":"17a10b1eb50fcf81abeca9cd61c8ffc2cd64370c87e29abd527fc041f960d2ba"} Dec 08 21:26:07 crc kubenswrapper[4791]: I1208 21:26:07.230898 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-c8c89d49c-bjcpg" event={"ID":"3cde98bf-1fc3-4460-879b-26802e310157","Type":"ContainerDied","Data":"069a6c5640736c6bc82fa8110200d708880b290a87333fb9918ffc8526dfd599"} Dec 08 21:26:07 crc kubenswrapper[4791]: I1208 21:26:07.230922 4791 scope.go:117] "RemoveContainer" containerID="17a10b1eb50fcf81abeca9cd61c8ffc2cd64370c87e29abd527fc041f960d2ba" Dec 08 21:26:07 crc kubenswrapper[4791]: I1208 21:26:07.230926 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-c8c89d49c-bjcpg" Dec 08 21:26:07 crc kubenswrapper[4791]: I1208 21:26:07.249838 4791 scope.go:117] "RemoveContainer" containerID="17a10b1eb50fcf81abeca9cd61c8ffc2cd64370c87e29abd527fc041f960d2ba" Dec 08 21:26:07 crc kubenswrapper[4791]: E1208 21:26:07.250211 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"17a10b1eb50fcf81abeca9cd61c8ffc2cd64370c87e29abd527fc041f960d2ba\": container with ID starting with 17a10b1eb50fcf81abeca9cd61c8ffc2cd64370c87e29abd527fc041f960d2ba not found: ID does not exist" containerID="17a10b1eb50fcf81abeca9cd61c8ffc2cd64370c87e29abd527fc041f960d2ba" Dec 08 21:26:07 crc kubenswrapper[4791]: I1208 21:26:07.250281 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"17a10b1eb50fcf81abeca9cd61c8ffc2cd64370c87e29abd527fc041f960d2ba"} err="failed to get container status \"17a10b1eb50fcf81abeca9cd61c8ffc2cd64370c87e29abd527fc041f960d2ba\": rpc error: code = NotFound desc = could not find container \"17a10b1eb50fcf81abeca9cd61c8ffc2cd64370c87e29abd527fc041f960d2ba\": container with ID starting with 17a10b1eb50fcf81abeca9cd61c8ffc2cd64370c87e29abd527fc041f960d2ba not found: ID does not exist" Dec 08 21:26:07 crc kubenswrapper[4791]: I1208 21:26:07.267208 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-c8c89d49c-bjcpg"] Dec 08 21:26:07 crc kubenswrapper[4791]: I1208 21:26:07.270795 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-c8c89d49c-bjcpg"] Dec 08 21:26:07 crc kubenswrapper[4791]: I1208 21:26:07.607217 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cde98bf-1fc3-4460-879b-26802e310157" path="/var/lib/kubelet/pods/3cde98bf-1fc3-4460-879b-26802e310157/volumes" Dec 08 21:27:35 crc kubenswrapper[4791]: I1208 21:27:35.251984 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:27:35 crc kubenswrapper[4791]: I1208 21:27:35.253059 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:28:05 crc kubenswrapper[4791]: I1208 21:28:05.252178 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:28:05 crc kubenswrapper[4791]: I1208 21:28:05.253692 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:28:35 crc kubenswrapper[4791]: I1208 21:28:35.251501 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon 
namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:28:35 crc kubenswrapper[4791]: I1208 21:28:35.251934 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:28:35 crc kubenswrapper[4791]: I1208 21:28:35.251981 4791 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" Dec 08 21:28:35 crc kubenswrapper[4791]: I1208 21:28:35.252927 4791 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"839f3fac6b8b61fb2e046952212eab2b1c2a06973dd6faea1a1740a19a5794d2"} pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 08 21:28:35 crc kubenswrapper[4791]: I1208 21:28:35.253017 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" containerID="cri-o://839f3fac6b8b61fb2e046952212eab2b1c2a06973dd6faea1a1740a19a5794d2" gracePeriod=600 Dec 08 21:28:36 crc kubenswrapper[4791]: I1208 21:28:36.208367 4791 generic.go:334] "Generic (PLEG): container finished" podID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerID="839f3fac6b8b61fb2e046952212eab2b1c2a06973dd6faea1a1740a19a5794d2" exitCode=0 Dec 08 21:28:36 crc kubenswrapper[4791]: I1208 21:28:36.208450 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" event={"ID":"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d","Type":"ContainerDied","Data":"839f3fac6b8b61fb2e046952212eab2b1c2a06973dd6faea1a1740a19a5794d2"} Dec 08 21:28:36 crc kubenswrapper[4791]: I1208 21:28:36.208694 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" event={"ID":"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d","Type":"ContainerStarted","Data":"905d580dece9c9ce3b0d4703b0d2f998c3f8e981a36384e1aaaf1f0e9b3109d3"} Dec 08 21:28:36 crc kubenswrapper[4791]: I1208 21:28:36.208734 4791 scope.go:117] "RemoveContainer" containerID="f02e0448eb4a9f07a94c3f82c8906b8b6abe63c2035df6075b84928e6646429d" Dec 08 21:29:33 crc kubenswrapper[4791]: I1208 21:29:33.193830 4791 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 08 21:29:49 crc kubenswrapper[4791]: I1208 21:29:49.769952 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-7rbmg"] Dec 08 21:29:49 crc kubenswrapper[4791]: E1208 21:29:49.770696 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3cde98bf-1fc3-4460-879b-26802e310157" containerName="console" Dec 08 21:29:49 crc kubenswrapper[4791]: I1208 21:29:49.770731 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="3cde98bf-1fc3-4460-879b-26802e310157" containerName="console" Dec 08 21:29:49 crc kubenswrapper[4791]: I1208 21:29:49.770840 4791 
memory_manager.go:354] "RemoveStaleState removing state" podUID="3cde98bf-1fc3-4460-879b-26802e310157" containerName="console" Dec 08 21:29:49 crc kubenswrapper[4791]: I1208 21:29:49.771663 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7rbmg" Dec 08 21:29:49 crc kubenswrapper[4791]: I1208 21:29:49.787921 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7rbmg"] Dec 08 21:29:49 crc kubenswrapper[4791]: I1208 21:29:49.932332 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc93922e-1dc5-4b15-9ab3-7a641f3cbce2-utilities\") pod \"redhat-marketplace-7rbmg\" (UID: \"cc93922e-1dc5-4b15-9ab3-7a641f3cbce2\") " pod="openshift-marketplace/redhat-marketplace-7rbmg" Dec 08 21:29:49 crc kubenswrapper[4791]: I1208 21:29:49.932415 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jpnmw\" (UniqueName: \"kubernetes.io/projected/cc93922e-1dc5-4b15-9ab3-7a641f3cbce2-kube-api-access-jpnmw\") pod \"redhat-marketplace-7rbmg\" (UID: \"cc93922e-1dc5-4b15-9ab3-7a641f3cbce2\") " pod="openshift-marketplace/redhat-marketplace-7rbmg" Dec 08 21:29:49 crc kubenswrapper[4791]: I1208 21:29:49.932825 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc93922e-1dc5-4b15-9ab3-7a641f3cbce2-catalog-content\") pod \"redhat-marketplace-7rbmg\" (UID: \"cc93922e-1dc5-4b15-9ab3-7a641f3cbce2\") " pod="openshift-marketplace/redhat-marketplace-7rbmg" Dec 08 21:29:50 crc kubenswrapper[4791]: I1208 21:29:50.034807 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc93922e-1dc5-4b15-9ab3-7a641f3cbce2-utilities\") pod \"redhat-marketplace-7rbmg\" (UID: \"cc93922e-1dc5-4b15-9ab3-7a641f3cbce2\") " pod="openshift-marketplace/redhat-marketplace-7rbmg" Dec 08 21:29:50 crc kubenswrapper[4791]: I1208 21:29:50.034885 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jpnmw\" (UniqueName: \"kubernetes.io/projected/cc93922e-1dc5-4b15-9ab3-7a641f3cbce2-kube-api-access-jpnmw\") pod \"redhat-marketplace-7rbmg\" (UID: \"cc93922e-1dc5-4b15-9ab3-7a641f3cbce2\") " pod="openshift-marketplace/redhat-marketplace-7rbmg" Dec 08 21:29:50 crc kubenswrapper[4791]: I1208 21:29:50.034971 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc93922e-1dc5-4b15-9ab3-7a641f3cbce2-catalog-content\") pod \"redhat-marketplace-7rbmg\" (UID: \"cc93922e-1dc5-4b15-9ab3-7a641f3cbce2\") " pod="openshift-marketplace/redhat-marketplace-7rbmg" Dec 08 21:29:50 crc kubenswrapper[4791]: I1208 21:29:50.035810 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc93922e-1dc5-4b15-9ab3-7a641f3cbce2-catalog-content\") pod \"redhat-marketplace-7rbmg\" (UID: \"cc93922e-1dc5-4b15-9ab3-7a641f3cbce2\") " pod="openshift-marketplace/redhat-marketplace-7rbmg" Dec 08 21:29:50 crc kubenswrapper[4791]: I1208 21:29:50.035842 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc93922e-1dc5-4b15-9ab3-7a641f3cbce2-utilities\") pod 
\"redhat-marketplace-7rbmg\" (UID: \"cc93922e-1dc5-4b15-9ab3-7a641f3cbce2\") " pod="openshift-marketplace/redhat-marketplace-7rbmg" Dec 08 21:29:50 crc kubenswrapper[4791]: I1208 21:29:50.060524 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jpnmw\" (UniqueName: \"kubernetes.io/projected/cc93922e-1dc5-4b15-9ab3-7a641f3cbce2-kube-api-access-jpnmw\") pod \"redhat-marketplace-7rbmg\" (UID: \"cc93922e-1dc5-4b15-9ab3-7a641f3cbce2\") " pod="openshift-marketplace/redhat-marketplace-7rbmg" Dec 08 21:29:50 crc kubenswrapper[4791]: I1208 21:29:50.098108 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7rbmg" Dec 08 21:29:50 crc kubenswrapper[4791]: I1208 21:29:50.694519 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7rbmg"] Dec 08 21:29:51 crc kubenswrapper[4791]: I1208 21:29:51.676151 4791 generic.go:334] "Generic (PLEG): container finished" podID="cc93922e-1dc5-4b15-9ab3-7a641f3cbce2" containerID="ea8f699d846fd8e95779eb99062bfff08833c478220b9190918f9c99eee4c80c" exitCode=0 Dec 08 21:29:51 crc kubenswrapper[4791]: I1208 21:29:51.676283 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7rbmg" event={"ID":"cc93922e-1dc5-4b15-9ab3-7a641f3cbce2","Type":"ContainerDied","Data":"ea8f699d846fd8e95779eb99062bfff08833c478220b9190918f9c99eee4c80c"} Dec 08 21:29:51 crc kubenswrapper[4791]: I1208 21:29:51.676637 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7rbmg" event={"ID":"cc93922e-1dc5-4b15-9ab3-7a641f3cbce2","Type":"ContainerStarted","Data":"7231ddf2eb85d7500d0b0a7aa65bda1d8a3548eb11b0e798da9af30b3b2d2b89"} Dec 08 21:29:51 crc kubenswrapper[4791]: I1208 21:29:51.681937 4791 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 08 21:29:52 crc kubenswrapper[4791]: I1208 21:29:52.684880 4791 generic.go:334] "Generic (PLEG): container finished" podID="cc93922e-1dc5-4b15-9ab3-7a641f3cbce2" containerID="89a069f9aca6c359b785529a00d7f1a07e16873340b517f0f2a052f65880e25b" exitCode=0 Dec 08 21:29:52 crc kubenswrapper[4791]: I1208 21:29:52.684925 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7rbmg" event={"ID":"cc93922e-1dc5-4b15-9ab3-7a641f3cbce2","Type":"ContainerDied","Data":"89a069f9aca6c359b785529a00d7f1a07e16873340b517f0f2a052f65880e25b"} Dec 08 21:29:53 crc kubenswrapper[4791]: I1208 21:29:53.694029 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7rbmg" event={"ID":"cc93922e-1dc5-4b15-9ab3-7a641f3cbce2","Type":"ContainerStarted","Data":"2d4a7c4919925aae65028a1dedd9a7e3f15b9074dedfe8e752efebdf7aa06450"} Dec 08 21:29:53 crc kubenswrapper[4791]: I1208 21:29:53.713865 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-7rbmg" podStartSLOduration=3.299845596 podStartE2EDuration="4.713846299s" podCreationTimestamp="2025-12-08 21:29:49 +0000 UTC" firstStartedPulling="2025-12-08 21:29:51.681544571 +0000 UTC m=+668.380302916" lastFinishedPulling="2025-12-08 21:29:53.095545264 +0000 UTC m=+669.794303619" observedRunningTime="2025-12-08 21:29:53.711949181 +0000 UTC m=+670.410707536" watchObservedRunningTime="2025-12-08 21:29:53.713846299 +0000 UTC m=+670.412604664" Dec 08 21:29:59 crc kubenswrapper[4791]: I1208 21:29:59.201262 4791 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107942d"] Dec 08 21:29:59 crc kubenswrapper[4791]: I1208 21:29:59.203097 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107942d" Dec 08 21:29:59 crc kubenswrapper[4791]: I1208 21:29:59.205816 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 08 21:29:59 crc kubenswrapper[4791]: I1208 21:29:59.211785 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107942d"] Dec 08 21:29:59 crc kubenswrapper[4791]: I1208 21:29:59.274852 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3af99b44-74f9-417f-9b20-ee09a09fe7d1-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107942d\" (UID: \"3af99b44-74f9-417f-9b20-ee09a09fe7d1\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107942d" Dec 08 21:29:59 crc kubenswrapper[4791]: I1208 21:29:59.274935 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6lqwv\" (UniqueName: \"kubernetes.io/projected/3af99b44-74f9-417f-9b20-ee09a09fe7d1-kube-api-access-6lqwv\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107942d\" (UID: \"3af99b44-74f9-417f-9b20-ee09a09fe7d1\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107942d" Dec 08 21:29:59 crc kubenswrapper[4791]: I1208 21:29:59.274972 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3af99b44-74f9-417f-9b20-ee09a09fe7d1-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107942d\" (UID: \"3af99b44-74f9-417f-9b20-ee09a09fe7d1\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107942d" Dec 08 21:29:59 crc kubenswrapper[4791]: I1208 21:29:59.376998 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3af99b44-74f9-417f-9b20-ee09a09fe7d1-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107942d\" (UID: \"3af99b44-74f9-417f-9b20-ee09a09fe7d1\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107942d" Dec 08 21:29:59 crc kubenswrapper[4791]: I1208 21:29:59.377036 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6lqwv\" (UniqueName: \"kubernetes.io/projected/3af99b44-74f9-417f-9b20-ee09a09fe7d1-kube-api-access-6lqwv\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107942d\" (UID: \"3af99b44-74f9-417f-9b20-ee09a09fe7d1\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107942d" Dec 08 21:29:59 crc kubenswrapper[4791]: I1208 21:29:59.377071 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3af99b44-74f9-417f-9b20-ee09a09fe7d1-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107942d\" (UID: \"3af99b44-74f9-417f-9b20-ee09a09fe7d1\") " 
pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107942d" Dec 08 21:29:59 crc kubenswrapper[4791]: I1208 21:29:59.377630 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3af99b44-74f9-417f-9b20-ee09a09fe7d1-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107942d\" (UID: \"3af99b44-74f9-417f-9b20-ee09a09fe7d1\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107942d" Dec 08 21:29:59 crc kubenswrapper[4791]: I1208 21:29:59.377654 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3af99b44-74f9-417f-9b20-ee09a09fe7d1-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107942d\" (UID: \"3af99b44-74f9-417f-9b20-ee09a09fe7d1\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107942d" Dec 08 21:29:59 crc kubenswrapper[4791]: I1208 21:29:59.397886 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6lqwv\" (UniqueName: \"kubernetes.io/projected/3af99b44-74f9-417f-9b20-ee09a09fe7d1-kube-api-access-6lqwv\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107942d\" (UID: \"3af99b44-74f9-417f-9b20-ee09a09fe7d1\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107942d" Dec 08 21:29:59 crc kubenswrapper[4791]: I1208 21:29:59.525564 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107942d" Dec 08 21:29:59 crc kubenswrapper[4791]: I1208 21:29:59.858549 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107942d"] Dec 08 21:30:00 crc kubenswrapper[4791]: I1208 21:30:00.098986 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-7rbmg" Dec 08 21:30:00 crc kubenswrapper[4791]: I1208 21:30:00.099045 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-7rbmg" Dec 08 21:30:00 crc kubenswrapper[4791]: I1208 21:30:00.156104 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420490-klwv4"] Dec 08 21:30:00 crc kubenswrapper[4791]: I1208 21:30:00.157131 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420490-klwv4" Dec 08 21:30:00 crc kubenswrapper[4791]: I1208 21:30:00.162140 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420490-klwv4"] Dec 08 21:30:00 crc kubenswrapper[4791]: I1208 21:30:00.162789 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 08 21:30:00 crc kubenswrapper[4791]: I1208 21:30:00.163341 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 08 21:30:00 crc kubenswrapper[4791]: I1208 21:30:00.165074 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-7rbmg" Dec 08 21:30:00 crc kubenswrapper[4791]: I1208 21:30:00.189881 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/69c09f4f-b646-4950-ace1-e4c4c8fb3c3e-config-volume\") pod \"collect-profiles-29420490-klwv4\" (UID: \"69c09f4f-b646-4950-ace1-e4c4c8fb3c3e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420490-klwv4" Dec 08 21:30:00 crc kubenswrapper[4791]: I1208 21:30:00.189931 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/69c09f4f-b646-4950-ace1-e4c4c8fb3c3e-secret-volume\") pod \"collect-profiles-29420490-klwv4\" (UID: \"69c09f4f-b646-4950-ace1-e4c4c8fb3c3e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420490-klwv4" Dec 08 21:30:00 crc kubenswrapper[4791]: I1208 21:30:00.189976 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zr74c\" (UniqueName: \"kubernetes.io/projected/69c09f4f-b646-4950-ace1-e4c4c8fb3c3e-kube-api-access-zr74c\") pod \"collect-profiles-29420490-klwv4\" (UID: \"69c09f4f-b646-4950-ace1-e4c4c8fb3c3e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420490-klwv4" Dec 08 21:30:00 crc kubenswrapper[4791]: I1208 21:30:00.292422 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/69c09f4f-b646-4950-ace1-e4c4c8fb3c3e-config-volume\") pod \"collect-profiles-29420490-klwv4\" (UID: \"69c09f4f-b646-4950-ace1-e4c4c8fb3c3e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420490-klwv4" Dec 08 21:30:00 crc kubenswrapper[4791]: I1208 21:30:00.292486 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/69c09f4f-b646-4950-ace1-e4c4c8fb3c3e-secret-volume\") pod \"collect-profiles-29420490-klwv4\" (UID: \"69c09f4f-b646-4950-ace1-e4c4c8fb3c3e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420490-klwv4" Dec 08 21:30:00 crc kubenswrapper[4791]: I1208 21:30:00.292541 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zr74c\" (UniqueName: \"kubernetes.io/projected/69c09f4f-b646-4950-ace1-e4c4c8fb3c3e-kube-api-access-zr74c\") pod \"collect-profiles-29420490-klwv4\" (UID: \"69c09f4f-b646-4950-ace1-e4c4c8fb3c3e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420490-klwv4" Dec 08 21:30:00 crc kubenswrapper[4791]: I1208 21:30:00.293522 
4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/69c09f4f-b646-4950-ace1-e4c4c8fb3c3e-config-volume\") pod \"collect-profiles-29420490-klwv4\" (UID: \"69c09f4f-b646-4950-ace1-e4c4c8fb3c3e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420490-klwv4" Dec 08 21:30:00 crc kubenswrapper[4791]: I1208 21:30:00.298281 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/69c09f4f-b646-4950-ace1-e4c4c8fb3c3e-secret-volume\") pod \"collect-profiles-29420490-klwv4\" (UID: \"69c09f4f-b646-4950-ace1-e4c4c8fb3c3e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420490-klwv4" Dec 08 21:30:00 crc kubenswrapper[4791]: I1208 21:30:00.310657 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zr74c\" (UniqueName: \"kubernetes.io/projected/69c09f4f-b646-4950-ace1-e4c4c8fb3c3e-kube-api-access-zr74c\") pod \"collect-profiles-29420490-klwv4\" (UID: \"69c09f4f-b646-4950-ace1-e4c4c8fb3c3e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420490-klwv4" Dec 08 21:30:00 crc kubenswrapper[4791]: I1208 21:30:00.482513 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420490-klwv4" Dec 08 21:30:00 crc kubenswrapper[4791]: I1208 21:30:00.780144 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107942d" event={"ID":"3af99b44-74f9-417f-9b20-ee09a09fe7d1","Type":"ContainerStarted","Data":"db08ee67dc097c4bb3cfa965d35d92302711ded7a3105b5133b1c552c613a52e"} Dec 08 21:30:00 crc kubenswrapper[4791]: I1208 21:30:00.780197 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107942d" event={"ID":"3af99b44-74f9-417f-9b20-ee09a09fe7d1","Type":"ContainerStarted","Data":"0148c319cdd36d733a34ac045f855daf8b38d6dd64a2782c276431f7aecd2c74"} Dec 08 21:30:00 crc kubenswrapper[4791]: I1208 21:30:00.874219 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-7rbmg" Dec 08 21:30:01 crc kubenswrapper[4791]: I1208 21:30:01.031008 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420490-klwv4"] Dec 08 21:30:01 crc kubenswrapper[4791]: W1208 21:30:01.037565 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod69c09f4f_b646_4950_ace1_e4c4c8fb3c3e.slice/crio-2be08e4c37e2499351d0436043d43179bf1dd47cb547bf813efbf94fc00afe76 WatchSource:0}: Error finding container 2be08e4c37e2499351d0436043d43179bf1dd47cb547bf813efbf94fc00afe76: Status 404 returned error can't find the container with id 2be08e4c37e2499351d0436043d43179bf1dd47cb547bf813efbf94fc00afe76 Dec 08 21:30:01 crc kubenswrapper[4791]: I1208 21:30:01.786753 4791 generic.go:334] "Generic (PLEG): container finished" podID="69c09f4f-b646-4950-ace1-e4c4c8fb3c3e" containerID="ff380b221b95f44bf3f85d68556f419d611fe1f3e6b4e5e9b308af98c52de7fd" exitCode=0 Dec 08 21:30:01 crc kubenswrapper[4791]: I1208 21:30:01.786846 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29420490-klwv4" 
event={"ID":"69c09f4f-b646-4950-ace1-e4c4c8fb3c3e","Type":"ContainerDied","Data":"ff380b221b95f44bf3f85d68556f419d611fe1f3e6b4e5e9b308af98c52de7fd"} Dec 08 21:30:01 crc kubenswrapper[4791]: I1208 21:30:01.786880 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29420490-klwv4" event={"ID":"69c09f4f-b646-4950-ace1-e4c4c8fb3c3e","Type":"ContainerStarted","Data":"2be08e4c37e2499351d0436043d43179bf1dd47cb547bf813efbf94fc00afe76"} Dec 08 21:30:01 crc kubenswrapper[4791]: I1208 21:30:01.790060 4791 generic.go:334] "Generic (PLEG): container finished" podID="3af99b44-74f9-417f-9b20-ee09a09fe7d1" containerID="db08ee67dc097c4bb3cfa965d35d92302711ded7a3105b5133b1c552c613a52e" exitCode=0 Dec 08 21:30:01 crc kubenswrapper[4791]: I1208 21:30:01.790143 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107942d" event={"ID":"3af99b44-74f9-417f-9b20-ee09a09fe7d1","Type":"ContainerDied","Data":"db08ee67dc097c4bb3cfa965d35d92302711ded7a3105b5133b1c552c613a52e"} Dec 08 21:30:02 crc kubenswrapper[4791]: I1208 21:30:02.151613 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-578fb"] Dec 08 21:30:02 crc kubenswrapper[4791]: I1208 21:30:02.156061 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-578fb" Dec 08 21:30:02 crc kubenswrapper[4791]: I1208 21:30:02.162939 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-578fb"] Dec 08 21:30:02 crc kubenswrapper[4791]: I1208 21:30:02.296886 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/46942f67-6d8f-4500-80c6-81c8d07c6fe5-catalog-content\") pod \"redhat-operators-578fb\" (UID: \"46942f67-6d8f-4500-80c6-81c8d07c6fe5\") " pod="openshift-marketplace/redhat-operators-578fb" Dec 08 21:30:02 crc kubenswrapper[4791]: I1208 21:30:02.297193 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/46942f67-6d8f-4500-80c6-81c8d07c6fe5-utilities\") pod \"redhat-operators-578fb\" (UID: \"46942f67-6d8f-4500-80c6-81c8d07c6fe5\") " pod="openshift-marketplace/redhat-operators-578fb" Dec 08 21:30:02 crc kubenswrapper[4791]: I1208 21:30:02.297292 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rmhm\" (UniqueName: \"kubernetes.io/projected/46942f67-6d8f-4500-80c6-81c8d07c6fe5-kube-api-access-4rmhm\") pod \"redhat-operators-578fb\" (UID: \"46942f67-6d8f-4500-80c6-81c8d07c6fe5\") " pod="openshift-marketplace/redhat-operators-578fb" Dec 08 21:30:02 crc kubenswrapper[4791]: I1208 21:30:02.398895 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/46942f67-6d8f-4500-80c6-81c8d07c6fe5-catalog-content\") pod \"redhat-operators-578fb\" (UID: \"46942f67-6d8f-4500-80c6-81c8d07c6fe5\") " pod="openshift-marketplace/redhat-operators-578fb" Dec 08 21:30:02 crc kubenswrapper[4791]: I1208 21:30:02.399005 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/46942f67-6d8f-4500-80c6-81c8d07c6fe5-utilities\") pod \"redhat-operators-578fb\" (UID: 
\"46942f67-6d8f-4500-80c6-81c8d07c6fe5\") " pod="openshift-marketplace/redhat-operators-578fb" Dec 08 21:30:02 crc kubenswrapper[4791]: I1208 21:30:02.399054 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rmhm\" (UniqueName: \"kubernetes.io/projected/46942f67-6d8f-4500-80c6-81c8d07c6fe5-kube-api-access-4rmhm\") pod \"redhat-operators-578fb\" (UID: \"46942f67-6d8f-4500-80c6-81c8d07c6fe5\") " pod="openshift-marketplace/redhat-operators-578fb" Dec 08 21:30:02 crc kubenswrapper[4791]: I1208 21:30:02.399854 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/46942f67-6d8f-4500-80c6-81c8d07c6fe5-catalog-content\") pod \"redhat-operators-578fb\" (UID: \"46942f67-6d8f-4500-80c6-81c8d07c6fe5\") " pod="openshift-marketplace/redhat-operators-578fb" Dec 08 21:30:02 crc kubenswrapper[4791]: I1208 21:30:02.399898 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/46942f67-6d8f-4500-80c6-81c8d07c6fe5-utilities\") pod \"redhat-operators-578fb\" (UID: \"46942f67-6d8f-4500-80c6-81c8d07c6fe5\") " pod="openshift-marketplace/redhat-operators-578fb" Dec 08 21:30:02 crc kubenswrapper[4791]: I1208 21:30:02.422519 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rmhm\" (UniqueName: \"kubernetes.io/projected/46942f67-6d8f-4500-80c6-81c8d07c6fe5-kube-api-access-4rmhm\") pod \"redhat-operators-578fb\" (UID: \"46942f67-6d8f-4500-80c6-81c8d07c6fe5\") " pod="openshift-marketplace/redhat-operators-578fb" Dec 08 21:30:02 crc kubenswrapper[4791]: I1208 21:30:02.473314 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-578fb" Dec 08 21:30:03 crc kubenswrapper[4791]: I1208 21:30:03.115307 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-578fb"] Dec 08 21:30:03 crc kubenswrapper[4791]: I1208 21:30:03.359350 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420490-klwv4" Dec 08 21:30:03 crc kubenswrapper[4791]: I1208 21:30:03.523863 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/69c09f4f-b646-4950-ace1-e4c4c8fb3c3e-secret-volume\") pod \"69c09f4f-b646-4950-ace1-e4c4c8fb3c3e\" (UID: \"69c09f4f-b646-4950-ace1-e4c4c8fb3c3e\") " Dec 08 21:30:03 crc kubenswrapper[4791]: I1208 21:30:03.525155 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/69c09f4f-b646-4950-ace1-e4c4c8fb3c3e-config-volume\") pod \"69c09f4f-b646-4950-ace1-e4c4c8fb3c3e\" (UID: \"69c09f4f-b646-4950-ace1-e4c4c8fb3c3e\") " Dec 08 21:30:03 crc kubenswrapper[4791]: I1208 21:30:03.525215 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zr74c\" (UniqueName: \"kubernetes.io/projected/69c09f4f-b646-4950-ace1-e4c4c8fb3c3e-kube-api-access-zr74c\") pod \"69c09f4f-b646-4950-ace1-e4c4c8fb3c3e\" (UID: \"69c09f4f-b646-4950-ace1-e4c4c8fb3c3e\") " Dec 08 21:30:03 crc kubenswrapper[4791]: I1208 21:30:03.525769 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/69c09f4f-b646-4950-ace1-e4c4c8fb3c3e-config-volume" (OuterVolumeSpecName: "config-volume") pod "69c09f4f-b646-4950-ace1-e4c4c8fb3c3e" (UID: "69c09f4f-b646-4950-ace1-e4c4c8fb3c3e"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:30:03 crc kubenswrapper[4791]: I1208 21:30:03.538105 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69c09f4f-b646-4950-ace1-e4c4c8fb3c3e-kube-api-access-zr74c" (OuterVolumeSpecName: "kube-api-access-zr74c") pod "69c09f4f-b646-4950-ace1-e4c4c8fb3c3e" (UID: "69c09f4f-b646-4950-ace1-e4c4c8fb3c3e"). InnerVolumeSpecName "kube-api-access-zr74c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:30:03 crc kubenswrapper[4791]: I1208 21:30:03.538184 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69c09f4f-b646-4950-ace1-e4c4c8fb3c3e-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "69c09f4f-b646-4950-ace1-e4c4c8fb3c3e" (UID: "69c09f4f-b646-4950-ace1-e4c4c8fb3c3e"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:30:03 crc kubenswrapper[4791]: I1208 21:30:03.628912 4791 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/69c09f4f-b646-4950-ace1-e4c4c8fb3c3e-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 08 21:30:03 crc kubenswrapper[4791]: I1208 21:30:03.628979 4791 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/69c09f4f-b646-4950-ace1-e4c4c8fb3c3e-config-volume\") on node \"crc\" DevicePath \"\"" Dec 08 21:30:03 crc kubenswrapper[4791]: I1208 21:30:03.629045 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zr74c\" (UniqueName: \"kubernetes.io/projected/69c09f4f-b646-4950-ace1-e4c4c8fb3c3e-kube-api-access-zr74c\") on node \"crc\" DevicePath \"\"" Dec 08 21:30:03 crc kubenswrapper[4791]: I1208 21:30:03.739262 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7rbmg"] Dec 08 21:30:03 crc kubenswrapper[4791]: I1208 21:30:03.739506 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-7rbmg" podUID="cc93922e-1dc5-4b15-9ab3-7a641f3cbce2" containerName="registry-server" containerID="cri-o://2d4a7c4919925aae65028a1dedd9a7e3f15b9074dedfe8e752efebdf7aa06450" gracePeriod=2 Dec 08 21:30:03 crc kubenswrapper[4791]: I1208 21:30:03.841371 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420490-klwv4" Dec 08 21:30:03 crc kubenswrapper[4791]: I1208 21:30:03.841677 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29420490-klwv4" event={"ID":"69c09f4f-b646-4950-ace1-e4c4c8fb3c3e","Type":"ContainerDied","Data":"2be08e4c37e2499351d0436043d43179bf1dd47cb547bf813efbf94fc00afe76"} Dec 08 21:30:03 crc kubenswrapper[4791]: I1208 21:30:03.841744 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2be08e4c37e2499351d0436043d43179bf1dd47cb547bf813efbf94fc00afe76" Dec 08 21:30:03 crc kubenswrapper[4791]: I1208 21:30:03.844029 4791 generic.go:334] "Generic (PLEG): container finished" podID="46942f67-6d8f-4500-80c6-81c8d07c6fe5" containerID="da173c972be994ce1e9116857ecb993dc9c9c81bcabd95b8d3052bef8577937a" exitCode=0 Dec 08 21:30:03 crc kubenswrapper[4791]: I1208 21:30:03.844063 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-578fb" event={"ID":"46942f67-6d8f-4500-80c6-81c8d07c6fe5","Type":"ContainerDied","Data":"da173c972be994ce1e9116857ecb993dc9c9c81bcabd95b8d3052bef8577937a"} Dec 08 21:30:03 crc kubenswrapper[4791]: I1208 21:30:03.844085 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-578fb" event={"ID":"46942f67-6d8f-4500-80c6-81c8d07c6fe5","Type":"ContainerStarted","Data":"12e5396034bd102d0044ab16f9c2046e3d210daed5f17c2231f7a53b8c4541ef"} Dec 08 21:30:04 crc kubenswrapper[4791]: I1208 21:30:04.146417 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7rbmg" Dec 08 21:30:04 crc kubenswrapper[4791]: I1208 21:30:04.339588 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jpnmw\" (UniqueName: \"kubernetes.io/projected/cc93922e-1dc5-4b15-9ab3-7a641f3cbce2-kube-api-access-jpnmw\") pod \"cc93922e-1dc5-4b15-9ab3-7a641f3cbce2\" (UID: \"cc93922e-1dc5-4b15-9ab3-7a641f3cbce2\") " Dec 08 21:30:04 crc kubenswrapper[4791]: I1208 21:30:04.340014 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc93922e-1dc5-4b15-9ab3-7a641f3cbce2-catalog-content\") pod \"cc93922e-1dc5-4b15-9ab3-7a641f3cbce2\" (UID: \"cc93922e-1dc5-4b15-9ab3-7a641f3cbce2\") " Dec 08 21:30:04 crc kubenswrapper[4791]: I1208 21:30:04.340079 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc93922e-1dc5-4b15-9ab3-7a641f3cbce2-utilities\") pod \"cc93922e-1dc5-4b15-9ab3-7a641f3cbce2\" (UID: \"cc93922e-1dc5-4b15-9ab3-7a641f3cbce2\") " Dec 08 21:30:04 crc kubenswrapper[4791]: I1208 21:30:04.340965 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cc93922e-1dc5-4b15-9ab3-7a641f3cbce2-utilities" (OuterVolumeSpecName: "utilities") pod "cc93922e-1dc5-4b15-9ab3-7a641f3cbce2" (UID: "cc93922e-1dc5-4b15-9ab3-7a641f3cbce2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:30:04 crc kubenswrapper[4791]: I1208 21:30:04.345451 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc93922e-1dc5-4b15-9ab3-7a641f3cbce2-kube-api-access-jpnmw" (OuterVolumeSpecName: "kube-api-access-jpnmw") pod "cc93922e-1dc5-4b15-9ab3-7a641f3cbce2" (UID: "cc93922e-1dc5-4b15-9ab3-7a641f3cbce2"). InnerVolumeSpecName "kube-api-access-jpnmw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:30:04 crc kubenswrapper[4791]: I1208 21:30:04.357661 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cc93922e-1dc5-4b15-9ab3-7a641f3cbce2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cc93922e-1dc5-4b15-9ab3-7a641f3cbce2" (UID: "cc93922e-1dc5-4b15-9ab3-7a641f3cbce2"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:30:04 crc kubenswrapper[4791]: I1208 21:30:04.441818 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jpnmw\" (UniqueName: \"kubernetes.io/projected/cc93922e-1dc5-4b15-9ab3-7a641f3cbce2-kube-api-access-jpnmw\") on node \"crc\" DevicePath \"\"" Dec 08 21:30:04 crc kubenswrapper[4791]: I1208 21:30:04.441883 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc93922e-1dc5-4b15-9ab3-7a641f3cbce2-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 21:30:04 crc kubenswrapper[4791]: I1208 21:30:04.441893 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc93922e-1dc5-4b15-9ab3-7a641f3cbce2-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 21:30:05 crc kubenswrapper[4791]: I1208 21:30:05.066721 4791 generic.go:334] "Generic (PLEG): container finished" podID="3af99b44-74f9-417f-9b20-ee09a09fe7d1" containerID="fcd9a92a0edbcfbad3cd4b56258eae9ed35c931b4ca02c3789640049066ae2c9" exitCode=0 Dec 08 21:30:05 crc kubenswrapper[4791]: I1208 21:30:05.066802 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107942d" event={"ID":"3af99b44-74f9-417f-9b20-ee09a09fe7d1","Type":"ContainerDied","Data":"fcd9a92a0edbcfbad3cd4b56258eae9ed35c931b4ca02c3789640049066ae2c9"} Dec 08 21:30:05 crc kubenswrapper[4791]: I1208 21:30:05.072090 4791 generic.go:334] "Generic (PLEG): container finished" podID="cc93922e-1dc5-4b15-9ab3-7a641f3cbce2" containerID="2d4a7c4919925aae65028a1dedd9a7e3f15b9074dedfe8e752efebdf7aa06450" exitCode=0 Dec 08 21:30:05 crc kubenswrapper[4791]: I1208 21:30:05.072144 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7rbmg" event={"ID":"cc93922e-1dc5-4b15-9ab3-7a641f3cbce2","Type":"ContainerDied","Data":"2d4a7c4919925aae65028a1dedd9a7e3f15b9074dedfe8e752efebdf7aa06450"} Dec 08 21:30:05 crc kubenswrapper[4791]: I1208 21:30:05.072176 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7rbmg" event={"ID":"cc93922e-1dc5-4b15-9ab3-7a641f3cbce2","Type":"ContainerDied","Data":"7231ddf2eb85d7500d0b0a7aa65bda1d8a3548eb11b0e798da9af30b3b2d2b89"} Dec 08 21:30:05 crc kubenswrapper[4791]: I1208 21:30:05.072179 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7rbmg" Dec 08 21:30:05 crc kubenswrapper[4791]: I1208 21:30:05.072197 4791 scope.go:117] "RemoveContainer" containerID="2d4a7c4919925aae65028a1dedd9a7e3f15b9074dedfe8e752efebdf7aa06450" Dec 08 21:30:05 crc kubenswrapper[4791]: I1208 21:30:05.101066 4791 scope.go:117] "RemoveContainer" containerID="89a069f9aca6c359b785529a00d7f1a07e16873340b517f0f2a052f65880e25b" Dec 08 21:30:05 crc kubenswrapper[4791]: I1208 21:30:05.122229 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7rbmg"] Dec 08 21:30:05 crc kubenswrapper[4791]: I1208 21:30:05.125739 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-7rbmg"] Dec 08 21:30:05 crc kubenswrapper[4791]: I1208 21:30:05.141405 4791 scope.go:117] "RemoveContainer" containerID="ea8f699d846fd8e95779eb99062bfff08833c478220b9190918f9c99eee4c80c" Dec 08 21:30:05 crc kubenswrapper[4791]: I1208 21:30:05.161088 4791 scope.go:117] "RemoveContainer" containerID="2d4a7c4919925aae65028a1dedd9a7e3f15b9074dedfe8e752efebdf7aa06450" Dec 08 21:30:05 crc kubenswrapper[4791]: E1208 21:30:05.161634 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d4a7c4919925aae65028a1dedd9a7e3f15b9074dedfe8e752efebdf7aa06450\": container with ID starting with 2d4a7c4919925aae65028a1dedd9a7e3f15b9074dedfe8e752efebdf7aa06450 not found: ID does not exist" containerID="2d4a7c4919925aae65028a1dedd9a7e3f15b9074dedfe8e752efebdf7aa06450" Dec 08 21:30:05 crc kubenswrapper[4791]: I1208 21:30:05.161692 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d4a7c4919925aae65028a1dedd9a7e3f15b9074dedfe8e752efebdf7aa06450"} err="failed to get container status \"2d4a7c4919925aae65028a1dedd9a7e3f15b9074dedfe8e752efebdf7aa06450\": rpc error: code = NotFound desc = could not find container \"2d4a7c4919925aae65028a1dedd9a7e3f15b9074dedfe8e752efebdf7aa06450\": container with ID starting with 2d4a7c4919925aae65028a1dedd9a7e3f15b9074dedfe8e752efebdf7aa06450 not found: ID does not exist" Dec 08 21:30:05 crc kubenswrapper[4791]: I1208 21:30:05.161747 4791 scope.go:117] "RemoveContainer" containerID="89a069f9aca6c359b785529a00d7f1a07e16873340b517f0f2a052f65880e25b" Dec 08 21:30:05 crc kubenswrapper[4791]: E1208 21:30:05.162060 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"89a069f9aca6c359b785529a00d7f1a07e16873340b517f0f2a052f65880e25b\": container with ID starting with 89a069f9aca6c359b785529a00d7f1a07e16873340b517f0f2a052f65880e25b not found: ID does not exist" containerID="89a069f9aca6c359b785529a00d7f1a07e16873340b517f0f2a052f65880e25b" Dec 08 21:30:05 crc kubenswrapper[4791]: I1208 21:30:05.162094 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"89a069f9aca6c359b785529a00d7f1a07e16873340b517f0f2a052f65880e25b"} err="failed to get container status \"89a069f9aca6c359b785529a00d7f1a07e16873340b517f0f2a052f65880e25b\": rpc error: code = NotFound desc = could not find container \"89a069f9aca6c359b785529a00d7f1a07e16873340b517f0f2a052f65880e25b\": container with ID starting with 89a069f9aca6c359b785529a00d7f1a07e16873340b517f0f2a052f65880e25b not found: ID does not exist" Dec 08 21:30:05 crc kubenswrapper[4791]: I1208 21:30:05.162115 4791 scope.go:117] "RemoveContainer" 
containerID="ea8f699d846fd8e95779eb99062bfff08833c478220b9190918f9c99eee4c80c" Dec 08 21:30:05 crc kubenswrapper[4791]: E1208 21:30:05.162413 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ea8f699d846fd8e95779eb99062bfff08833c478220b9190918f9c99eee4c80c\": container with ID starting with ea8f699d846fd8e95779eb99062bfff08833c478220b9190918f9c99eee4c80c not found: ID does not exist" containerID="ea8f699d846fd8e95779eb99062bfff08833c478220b9190918f9c99eee4c80c" Dec 08 21:30:05 crc kubenswrapper[4791]: I1208 21:30:05.162466 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea8f699d846fd8e95779eb99062bfff08833c478220b9190918f9c99eee4c80c"} err="failed to get container status \"ea8f699d846fd8e95779eb99062bfff08833c478220b9190918f9c99eee4c80c\": rpc error: code = NotFound desc = could not find container \"ea8f699d846fd8e95779eb99062bfff08833c478220b9190918f9c99eee4c80c\": container with ID starting with ea8f699d846fd8e95779eb99062bfff08833c478220b9190918f9c99eee4c80c not found: ID does not exist" Dec 08 21:30:05 crc kubenswrapper[4791]: I1208 21:30:05.607073 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc93922e-1dc5-4b15-9ab3-7a641f3cbce2" path="/var/lib/kubelet/pods/cc93922e-1dc5-4b15-9ab3-7a641f3cbce2/volumes" Dec 08 21:30:06 crc kubenswrapper[4791]: I1208 21:30:06.082387 4791 generic.go:334] "Generic (PLEG): container finished" podID="3af99b44-74f9-417f-9b20-ee09a09fe7d1" containerID="193dbd6c1be8644c05114394b0eb2aa228e45aa206fe44c69c5bd47e390405ec" exitCode=0 Dec 08 21:30:06 crc kubenswrapper[4791]: I1208 21:30:06.082466 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107942d" event={"ID":"3af99b44-74f9-417f-9b20-ee09a09fe7d1","Type":"ContainerDied","Data":"193dbd6c1be8644c05114394b0eb2aa228e45aa206fe44c69c5bd47e390405ec"} Dec 08 21:30:06 crc kubenswrapper[4791]: I1208 21:30:06.880807 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-dk8tz"] Dec 08 21:30:06 crc kubenswrapper[4791]: I1208 21:30:06.882181 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" podUID="0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" containerName="ovn-controller" containerID="cri-o://1ea79a956f5bc31b744c699f976d260048fa6d0c69e5d96cc3e832c697a796ce" gracePeriod=30 Dec 08 21:30:06 crc kubenswrapper[4791]: I1208 21:30:06.882243 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" podUID="0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" containerName="nbdb" containerID="cri-o://1fe5fcb37182b9302ac277ae161589f867ee9126114052e88a82048e319595c6" gracePeriod=30 Dec 08 21:30:06 crc kubenswrapper[4791]: I1208 21:30:06.882458 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" podUID="0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" containerName="northd" containerID="cri-o://32b83e290f40edffd1d7679ef280a32bff978b35e9b0fcd1f179d3707021ba3a" gracePeriod=30 Dec 08 21:30:06 crc kubenswrapper[4791]: I1208 21:30:06.882576 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" podUID="0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" containerName="kube-rbac-proxy-ovn-metrics" 
containerID="cri-o://22cca113e278abf9667c373147d1bf61b951ae07e7e1046e13ebf28aa0355007" gracePeriod=30 Dec 08 21:30:06 crc kubenswrapper[4791]: I1208 21:30:06.882676 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" podUID="0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" containerName="kube-rbac-proxy-node" containerID="cri-o://92ac0f9db30045f8ac6e2997ecb935634f31f5ee0cbb2da74e87c344df3ed125" gracePeriod=30 Dec 08 21:30:06 crc kubenswrapper[4791]: I1208 21:30:06.882877 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" podUID="0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" containerName="ovn-acl-logging" containerID="cri-o://37c2a697912d045e363b17699528d1f17acd50d3a0883e40b9023d72f3cf5e56" gracePeriod=30 Dec 08 21:30:06 crc kubenswrapper[4791]: I1208 21:30:06.882901 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" podUID="0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" containerName="sbdb" containerID="cri-o://3aa9dedf617d8d5c7f71b58eb379feceb649024dfd1ba3711088afbb5b873936" gracePeriod=30 Dec 08 21:30:06 crc kubenswrapper[4791]: I1208 21:30:06.924764 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" podUID="0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" containerName="ovnkube-controller" containerID="cri-o://edf0bc928bbf2c2486d9351ffca88f382b57055225c9ddb099d15cc38fc12462" gracePeriod=30 Dec 08 21:30:07 crc kubenswrapper[4791]: I1208 21:30:07.241079 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107942d" Dec 08 21:30:07 crc kubenswrapper[4791]: I1208 21:30:07.313538 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3af99b44-74f9-417f-9b20-ee09a09fe7d1-bundle\") pod \"3af99b44-74f9-417f-9b20-ee09a09fe7d1\" (UID: \"3af99b44-74f9-417f-9b20-ee09a09fe7d1\") " Dec 08 21:30:07 crc kubenswrapper[4791]: I1208 21:30:07.313589 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3af99b44-74f9-417f-9b20-ee09a09fe7d1-util\") pod \"3af99b44-74f9-417f-9b20-ee09a09fe7d1\" (UID: \"3af99b44-74f9-417f-9b20-ee09a09fe7d1\") " Dec 08 21:30:07 crc kubenswrapper[4791]: I1208 21:30:07.313611 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6lqwv\" (UniqueName: \"kubernetes.io/projected/3af99b44-74f9-417f-9b20-ee09a09fe7d1-kube-api-access-6lqwv\") pod \"3af99b44-74f9-417f-9b20-ee09a09fe7d1\" (UID: \"3af99b44-74f9-417f-9b20-ee09a09fe7d1\") " Dec 08 21:30:07 crc kubenswrapper[4791]: I1208 21:30:07.316161 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3af99b44-74f9-417f-9b20-ee09a09fe7d1-bundle" (OuterVolumeSpecName: "bundle") pod "3af99b44-74f9-417f-9b20-ee09a09fe7d1" (UID: "3af99b44-74f9-417f-9b20-ee09a09fe7d1"). InnerVolumeSpecName "bundle". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:30:07 crc kubenswrapper[4791]: I1208 21:30:07.319058 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3af99b44-74f9-417f-9b20-ee09a09fe7d1-kube-api-access-6lqwv" (OuterVolumeSpecName: "kube-api-access-6lqwv") pod "3af99b44-74f9-417f-9b20-ee09a09fe7d1" (UID: "3af99b44-74f9-417f-9b20-ee09a09fe7d1"). InnerVolumeSpecName "kube-api-access-6lqwv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:30:07 crc kubenswrapper[4791]: I1208 21:30:07.323759 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3af99b44-74f9-417f-9b20-ee09a09fe7d1-util" (OuterVolumeSpecName: "util") pod "3af99b44-74f9-417f-9b20-ee09a09fe7d1" (UID: "3af99b44-74f9-417f-9b20-ee09a09fe7d1"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:30:07 crc kubenswrapper[4791]: I1208 21:30:07.414409 4791 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3af99b44-74f9-417f-9b20-ee09a09fe7d1-util\") on node \"crc\" DevicePath \"\"" Dec 08 21:30:07 crc kubenswrapper[4791]: I1208 21:30:07.414443 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6lqwv\" (UniqueName: \"kubernetes.io/projected/3af99b44-74f9-417f-9b20-ee09a09fe7d1-kube-api-access-6lqwv\") on node \"crc\" DevicePath \"\"" Dec 08 21:30:07 crc kubenswrapper[4791]: I1208 21:30:07.414456 4791 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3af99b44-74f9-417f-9b20-ee09a09fe7d1-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.099921 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107942d" event={"ID":"3af99b44-74f9-417f-9b20-ee09a09fe7d1","Type":"ContainerDied","Data":"0148c319cdd36d733a34ac045f855daf8b38d6dd64a2782c276431f7aecd2c74"} Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.100220 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0148c319cdd36d733a34ac045f855daf8b38d6dd64a2782c276431f7aecd2c74" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.100006 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107942d" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.101304 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-49gdc_dd95c042-30cb-438f-8e98-9aebe3ea93bc/kube-multus/0.log" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.101330 4791 generic.go:334] "Generic (PLEG): container finished" podID="dd95c042-30cb-438f-8e98-9aebe3ea93bc" containerID="4f25965cfe0a07212e4d159d38b276988beeef898ceacdffa62335cf03e0eb55" exitCode=2 Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.101371 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-49gdc" event={"ID":"dd95c042-30cb-438f-8e98-9aebe3ea93bc","Type":"ContainerDied","Data":"4f25965cfe0a07212e4d159d38b276988beeef898ceacdffa62335cf03e0eb55"} Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.101675 4791 scope.go:117] "RemoveContainer" containerID="4f25965cfe0a07212e4d159d38b276988beeef898ceacdffa62335cf03e0eb55" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.107963 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-dk8tz_0b3968ef-9912-4b4e-bb09-95ab9d9c19c5/ovn-acl-logging/0.log" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.109501 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-dk8tz_0b3968ef-9912-4b4e-bb09-95ab9d9c19c5/ovn-controller/0.log" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.109878 4791 generic.go:334] "Generic (PLEG): container finished" podID="0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" containerID="edf0bc928bbf2c2486d9351ffca88f382b57055225c9ddb099d15cc38fc12462" exitCode=0 Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.109907 4791 generic.go:334] "Generic (PLEG): container finished" podID="0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" containerID="3aa9dedf617d8d5c7f71b58eb379feceb649024dfd1ba3711088afbb5b873936" exitCode=0 Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.109915 4791 generic.go:334] "Generic (PLEG): container finished" podID="0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" containerID="1fe5fcb37182b9302ac277ae161589f867ee9126114052e88a82048e319595c6" exitCode=0 Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.109922 4791 generic.go:334] "Generic (PLEG): container finished" podID="0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" containerID="32b83e290f40edffd1d7679ef280a32bff978b35e9b0fcd1f179d3707021ba3a" exitCode=0 Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.109932 4791 generic.go:334] "Generic (PLEG): container finished" podID="0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" containerID="22cca113e278abf9667c373147d1bf61b951ae07e7e1046e13ebf28aa0355007" exitCode=0 Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.109939 4791 generic.go:334] "Generic (PLEG): container finished" podID="0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" containerID="92ac0f9db30045f8ac6e2997ecb935634f31f5ee0cbb2da74e87c344df3ed125" exitCode=0 Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.109949 4791 generic.go:334] "Generic (PLEG): container finished" podID="0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" containerID="37c2a697912d045e363b17699528d1f17acd50d3a0883e40b9023d72f3cf5e56" exitCode=143 Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.109962 4791 generic.go:334] "Generic (PLEG): container finished" podID="0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" 
containerID="1ea79a956f5bc31b744c699f976d260048fa6d0c69e5d96cc3e832c697a796ce" exitCode=143 Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.109942 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" event={"ID":"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5","Type":"ContainerDied","Data":"edf0bc928bbf2c2486d9351ffca88f382b57055225c9ddb099d15cc38fc12462"} Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.109993 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" event={"ID":"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5","Type":"ContainerDied","Data":"3aa9dedf617d8d5c7f71b58eb379feceb649024dfd1ba3711088afbb5b873936"} Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.110009 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" event={"ID":"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5","Type":"ContainerDied","Data":"1fe5fcb37182b9302ac277ae161589f867ee9126114052e88a82048e319595c6"} Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.110019 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" event={"ID":"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5","Type":"ContainerDied","Data":"32b83e290f40edffd1d7679ef280a32bff978b35e9b0fcd1f179d3707021ba3a"} Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.110030 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" event={"ID":"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5","Type":"ContainerDied","Data":"22cca113e278abf9667c373147d1bf61b951ae07e7e1046e13ebf28aa0355007"} Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.110040 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" event={"ID":"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5","Type":"ContainerDied","Data":"92ac0f9db30045f8ac6e2997ecb935634f31f5ee0cbb2da74e87c344df3ed125"} Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.110050 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" event={"ID":"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5","Type":"ContainerDied","Data":"37c2a697912d045e363b17699528d1f17acd50d3a0883e40b9023d72f3cf5e56"} Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.110059 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" event={"ID":"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5","Type":"ContainerDied","Data":"1ea79a956f5bc31b744c699f976d260048fa6d0c69e5d96cc3e832c697a796ce"} Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.110072 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" event={"ID":"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5","Type":"ContainerDied","Data":"c60211e1d44c665738cfe3a046e7c78aa331225d6c7c26c2f3e1043dc2e3d3c2"} Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.110084 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c60211e1d44c665738cfe3a046e7c78aa331225d6c7c26c2f3e1043dc2e3d3c2" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.196390 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-dk8tz_0b3968ef-9912-4b4e-bb09-95ab9d9c19c5/ovn-acl-logging/0.log" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.196885 4791 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-dk8tz_0b3968ef-9912-4b4e-bb09-95ab9d9c19c5/ovn-controller/0.log" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.197590 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.299683 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-l5qqv"] Dec 08 21:30:08 crc kubenswrapper[4791]: E1208 21:30:08.300068 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc93922e-1dc5-4b15-9ab3-7a641f3cbce2" containerName="extract-utilities" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.300083 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc93922e-1dc5-4b15-9ab3-7a641f3cbce2" containerName="extract-utilities" Dec 08 21:30:08 crc kubenswrapper[4791]: E1208 21:30:08.300098 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" containerName="ovn-controller" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.300104 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" containerName="ovn-controller" Dec 08 21:30:08 crc kubenswrapper[4791]: E1208 21:30:08.300112 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" containerName="sbdb" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.300118 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" containerName="sbdb" Dec 08 21:30:08 crc kubenswrapper[4791]: E1208 21:30:08.300128 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" containerName="northd" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.300134 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" containerName="northd" Dec 08 21:30:08 crc kubenswrapper[4791]: E1208 21:30:08.300142 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" containerName="ovn-acl-logging" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.300149 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" containerName="ovn-acl-logging" Dec 08 21:30:08 crc kubenswrapper[4791]: E1208 21:30:08.300159 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3af99b44-74f9-417f-9b20-ee09a09fe7d1" containerName="pull" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.300165 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="3af99b44-74f9-417f-9b20-ee09a09fe7d1" containerName="pull" Dec 08 21:30:08 crc kubenswrapper[4791]: E1208 21:30:08.300176 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" containerName="kube-rbac-proxy-node" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.300184 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" containerName="kube-rbac-proxy-node" Dec 08 21:30:08 crc kubenswrapper[4791]: E1208 21:30:08.300192 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" containerName="kube-rbac-proxy-ovn-metrics" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.300198 4791 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" containerName="kube-rbac-proxy-ovn-metrics" Dec 08 21:30:08 crc kubenswrapper[4791]: E1208 21:30:08.300206 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc93922e-1dc5-4b15-9ab3-7a641f3cbce2" containerName="registry-server" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.300212 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc93922e-1dc5-4b15-9ab3-7a641f3cbce2" containerName="registry-server" Dec 08 21:30:08 crc kubenswrapper[4791]: E1208 21:30:08.300219 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3af99b44-74f9-417f-9b20-ee09a09fe7d1" containerName="extract" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.300224 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="3af99b44-74f9-417f-9b20-ee09a09fe7d1" containerName="extract" Dec 08 21:30:08 crc kubenswrapper[4791]: E1208 21:30:08.300233 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" containerName="kubecfg-setup" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.300239 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" containerName="kubecfg-setup" Dec 08 21:30:08 crc kubenswrapper[4791]: E1208 21:30:08.300248 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc93922e-1dc5-4b15-9ab3-7a641f3cbce2" containerName="extract-content" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.300254 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc93922e-1dc5-4b15-9ab3-7a641f3cbce2" containerName="extract-content" Dec 08 21:30:08 crc kubenswrapper[4791]: E1208 21:30:08.300261 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" containerName="nbdb" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.300267 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" containerName="nbdb" Dec 08 21:30:08 crc kubenswrapper[4791]: E1208 21:30:08.300278 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3af99b44-74f9-417f-9b20-ee09a09fe7d1" containerName="util" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.300283 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="3af99b44-74f9-417f-9b20-ee09a09fe7d1" containerName="util" Dec 08 21:30:08 crc kubenswrapper[4791]: E1208 21:30:08.300291 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" containerName="ovnkube-controller" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.300297 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" containerName="ovnkube-controller" Dec 08 21:30:08 crc kubenswrapper[4791]: E1208 21:30:08.300306 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69c09f4f-b646-4950-ace1-e4c4c8fb3c3e" containerName="collect-profiles" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.300312 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="69c09f4f-b646-4950-ace1-e4c4c8fb3c3e" containerName="collect-profiles" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.300444 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc93922e-1dc5-4b15-9ab3-7a641f3cbce2" containerName="registry-server" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.300460 4791 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" containerName="kube-rbac-proxy-ovn-metrics" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.300469 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" containerName="ovn-controller" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.300485 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" containerName="ovn-acl-logging" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.300497 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="69c09f4f-b646-4950-ace1-e4c4c8fb3c3e" containerName="collect-profiles" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.300506 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" containerName="northd" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.300516 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" containerName="kube-rbac-proxy-node" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.300525 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" containerName="ovnkube-controller" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.300532 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" containerName="sbdb" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.300540 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" containerName="nbdb" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.300549 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="3af99b44-74f9-417f-9b20-ee09a09fe7d1" containerName="extract" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.303117 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.381439 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-env-overrides\") pod \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.381771 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-ovn-node-metrics-cert\") pod \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.381804 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-var-lib-openvswitch\") pod \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.381829 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-node-log\") pod \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.381860 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-run-ovn\") pod \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.381885 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-host-run-ovn-kubernetes\") pod \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.381917 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" (UID: "0b3968ef-9912-4b4e-bb09-95ab9d9c19c5"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.381951 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" (UID: "0b3968ef-9912-4b4e-bb09-95ab9d9c19c5"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.381975 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" (UID: "0b3968ef-9912-4b4e-bb09-95ab9d9c19c5"). InnerVolumeSpecName "var-lib-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.381988 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-node-log" (OuterVolumeSpecName: "node-log") pod "0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" (UID: "0b3968ef-9912-4b4e-bb09-95ab9d9c19c5"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.382009 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" (UID: "0b3968ef-9912-4b4e-bb09-95ab9d9c19c5"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.382032 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-host-run-netns\") pod \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.382157 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" (UID: "0b3968ef-9912-4b4e-bb09-95ab9d9c19c5"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.382322 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-run-openvswitch\") pod \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.382351 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-host-cni-netd\") pod \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.382374 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-log-socket\") pod \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.382395 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-etc-openvswitch\") pod \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.382440 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-host-slash\") pod \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.382487 4791 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-run-systemd\") pod \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.382519 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-ovnkube-script-lib\") pod \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.382559 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-host-var-lib-cni-networks-ovn-kubernetes\") pod \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.382590 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-host-kubelet\") pod \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.382519 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-log-socket" (OuterVolumeSpecName: "log-socket") pod "0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" (UID: "0b3968ef-9912-4b4e-bb09-95ab9d9c19c5"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.382649 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-systemd-units\") pod \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.382534 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" (UID: "0b3968ef-9912-4b4e-bb09-95ab9d9c19c5"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.382548 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" (UID: "0b3968ef-9912-4b4e-bb09-95ab9d9c19c5"). InnerVolumeSpecName "host-cni-netd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.382703 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dfx26\" (UniqueName: \"kubernetes.io/projected/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-kube-api-access-dfx26\") pod \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.382751 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-ovnkube-config\") pod \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.382770 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-host-cni-bin\") pod \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\" (UID: \"0b3968ef-9912-4b4e-bb09-95ab9d9c19c5\") " Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.382960 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-host-cni-netd\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.382987 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-env-overrides\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.383024 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-host-run-ovn-kubernetes\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.383057 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-ovnkube-script-lib\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.383078 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-host-slash\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.383125 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-ovn-node-metrics-cert\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.383141 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-run-ovn\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.383158 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.383173 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-ovnkube-config\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.383195 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-host-kubelet\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.383208 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-run-systemd\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.383222 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-log-socket\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.383270 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-etc-openvswitch\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.383310 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-run-openvswitch\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.383261 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-ovnkube-script-lib" (OuterVolumeSpecName: 
"ovnkube-script-lib") pod "0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" (UID: "0b3968ef-9912-4b4e-bb09-95ab9d9c19c5"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.383297 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" (UID: "0b3968ef-9912-4b4e-bb09-95ab9d9c19c5"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.383358 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" (UID: "0b3968ef-9912-4b4e-bb09-95ab9d9c19c5"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.383379 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-host-slash" (OuterVolumeSpecName: "host-slash") pod "0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" (UID: "0b3968ef-9912-4b4e-bb09-95ab9d9c19c5"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.383382 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" (UID: "0b3968ef-9912-4b4e-bb09-95ab9d9c19c5"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.383411 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" (UID: "0b3968ef-9912-4b4e-bb09-95ab9d9c19c5"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.383811 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" (UID: "0b3968ef-9912-4b4e-bb09-95ab9d9c19c5"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.383924 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" (UID: "0b3968ef-9912-4b4e-bb09-95ab9d9c19c5"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.384016 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-node-log\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.384047 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-58pr6\" (UniqueName: \"kubernetes.io/projected/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-kube-api-access-58pr6\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.384074 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-host-cni-bin\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.384104 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-var-lib-openvswitch\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.384208 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-host-run-netns\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.384250 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-systemd-units\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.384343 4791 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-node-log\") on node \"crc\" DevicePath \"\"" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.384377 4791 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.384388 4791 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-host-run-netns\") on node \"crc\" DevicePath \"\"" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.384402 4791 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 08 
21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.384413 4791 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.384424 4791 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.384435 4791 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-log-socket\") on node \"crc\" DevicePath \"\"" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.384447 4791 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.384460 4791 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-host-slash\") on node \"crc\" DevicePath \"\"" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.384471 4791 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.384479 4791 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.384489 4791 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.384499 4791 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-systemd-units\") on node \"crc\" DevicePath \"\"" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.384508 4791 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.384517 4791 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.384525 4791 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.384534 4791 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 08 21:30:08 crc 
kubenswrapper[4791]: I1208 21:30:08.389359 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-kube-api-access-dfx26" (OuterVolumeSpecName: "kube-api-access-dfx26") pod "0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" (UID: "0b3968ef-9912-4b4e-bb09-95ab9d9c19c5"). InnerVolumeSpecName "kube-api-access-dfx26". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.390351 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" (UID: "0b3968ef-9912-4b4e-bb09-95ab9d9c19c5"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.399065 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" (UID: "0b3968ef-9912-4b4e-bb09-95ab9d9c19c5"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.486392 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-host-run-ovn-kubernetes\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.486488 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-ovnkube-script-lib\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.486522 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-host-slash\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.486565 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-ovn-node-metrics-cert\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.486584 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-run-ovn\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.486648 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.486669 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-ovnkube-config\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.486689 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-host-kubelet\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.486710 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-run-systemd\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.486746 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-log-socket\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.486753 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-host-slash\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.486790 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-etc-openvswitch\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.486867 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-etc-openvswitch\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.486915 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-run-openvswitch\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.486991 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-node-log\") pod \"ovnkube-node-l5qqv\" (UID: 
\"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.487013 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-58pr6\" (UniqueName: \"kubernetes.io/projected/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-kube-api-access-58pr6\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.487056 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-host-cni-bin\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.487160 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-var-lib-openvswitch\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.487196 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-host-run-netns\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.487221 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-systemd-units\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.487272 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-host-cni-netd\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.487293 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-env-overrides\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.487305 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-run-ovn\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.487383 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-run-systemd\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc 
kubenswrapper[4791]: I1208 21:30:08.487385 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-log-socket\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.487405 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-systemd-units\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.487420 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-host-cni-netd\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.487429 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-run-openvswitch\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.487441 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-ovnkube-script-lib\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.487599 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.487631 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-host-kubelet\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.487644 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-host-cni-bin\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.487674 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-node-log\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.487712 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" 
(UniqueName: \"kubernetes.io/host-path/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-var-lib-openvswitch\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.487764 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-host-run-netns\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.487807 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-host-run-ovn-kubernetes\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.488160 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-env-overrides\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.488212 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-ovnkube-config\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.488656 4791 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.488916 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dfx26\" (UniqueName: \"kubernetes.io/projected/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-kube-api-access-dfx26\") on node \"crc\" DevicePath \"\"" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.489007 4791 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.491186 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-ovn-node-metrics-cert\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.507259 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-58pr6\" (UniqueName: \"kubernetes.io/projected/e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99-kube-api-access-58pr6\") pod \"ovnkube-node-l5qqv\" (UID: \"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:08 crc kubenswrapper[4791]: I1208 21:30:08.620989 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:09 crc kubenswrapper[4791]: I1208 21:30:09.122410 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-49gdc_dd95c042-30cb-438f-8e98-9aebe3ea93bc/kube-multus/0.log" Dec 08 21:30:09 crc kubenswrapper[4791]: I1208 21:30:09.122586 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-49gdc" event={"ID":"dd95c042-30cb-438f-8e98-9aebe3ea93bc","Type":"ContainerStarted","Data":"a2458ec817c22024a0cdcb8399b23e961b7493c6f88a5fdc2f877842728ab509"} Dec 08 21:30:09 crc kubenswrapper[4791]: I1208 21:30:09.127064 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" event={"ID":"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99","Type":"ContainerDied","Data":"aed4eec9a822141841fcaeed9fc43b1bd3137bacb00f754fc51cf7708a3d30d9"} Dec 08 21:30:09 crc kubenswrapper[4791]: I1208 21:30:09.127883 4791 generic.go:334] "Generic (PLEG): container finished" podID="e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99" containerID="aed4eec9a822141841fcaeed9fc43b1bd3137bacb00f754fc51cf7708a3d30d9" exitCode=0 Dec 08 21:30:09 crc kubenswrapper[4791]: I1208 21:30:09.128128 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-dk8tz" Dec 08 21:30:09 crc kubenswrapper[4791]: I1208 21:30:09.128127 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" event={"ID":"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99","Type":"ContainerStarted","Data":"9364900aa79ce1faee14385e0c3474549486909118efbc399e45dd042057d795"} Dec 08 21:30:09 crc kubenswrapper[4791]: I1208 21:30:09.210232 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-dk8tz"] Dec 08 21:30:09 crc kubenswrapper[4791]: I1208 21:30:09.214959 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-dk8tz"] Dec 08 21:30:09 crc kubenswrapper[4791]: I1208 21:30:09.608751 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b3968ef-9912-4b4e-bb09-95ab9d9c19c5" path="/var/lib/kubelet/pods/0b3968ef-9912-4b4e-bb09-95ab9d9c19c5/volumes" Dec 08 21:30:21 crc kubenswrapper[4791]: I1208 21:30:21.327107 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-8ckfk"] Dec 08 21:30:21 crc kubenswrapper[4791]: I1208 21:30:21.328572 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-8ckfk" Dec 08 21:30:21 crc kubenswrapper[4791]: I1208 21:30:21.331262 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Dec 08 21:30:21 crc kubenswrapper[4791]: I1208 21:30:21.332771 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Dec 08 21:30:21 crc kubenswrapper[4791]: I1208 21:30:21.336453 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-zxqtp" Dec 08 21:30:21 crc kubenswrapper[4791]: I1208 21:30:21.402984 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-96vbr\" (UniqueName: \"kubernetes.io/projected/5bf6b4bb-0cd5-4461-b351-def18dd64e8c-kube-api-access-96vbr\") pod \"obo-prometheus-operator-668cf9dfbb-8ckfk\" (UID: \"5bf6b4bb-0cd5-4461-b351-def18dd64e8c\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-8ckfk" Dec 08 21:30:21 crc kubenswrapper[4791]: I1208 21:30:21.494218 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-w777c"] Dec 08 21:30:21 crc kubenswrapper[4791]: I1208 21:30:21.495292 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-w777c" Dec 08 21:30:21 crc kubenswrapper[4791]: I1208 21:30:21.504220 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-96vbr\" (UniqueName: \"kubernetes.io/projected/5bf6b4bb-0cd5-4461-b351-def18dd64e8c-kube-api-access-96vbr\") pod \"obo-prometheus-operator-668cf9dfbb-8ckfk\" (UID: \"5bf6b4bb-0cd5-4461-b351-def18dd64e8c\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-8ckfk" Dec 08 21:30:21 crc kubenswrapper[4791]: I1208 21:30:21.518695 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Dec 08 21:30:21 crc kubenswrapper[4791]: I1208 21:30:21.534804 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-5pfgk"] Dec 08 21:30:21 crc kubenswrapper[4791]: I1208 21:30:21.535967 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-5pfgk" Dec 08 21:30:21 crc kubenswrapper[4791]: I1208 21:30:21.545284 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-nsmp9" Dec 08 21:30:21 crc kubenswrapper[4791]: I1208 21:30:21.589746 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-96vbr\" (UniqueName: \"kubernetes.io/projected/5bf6b4bb-0cd5-4461-b351-def18dd64e8c-kube-api-access-96vbr\") pod \"obo-prometheus-operator-668cf9dfbb-8ckfk\" (UID: \"5bf6b4bb-0cd5-4461-b351-def18dd64e8c\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-8ckfk" Dec 08 21:30:21 crc kubenswrapper[4791]: I1208 21:30:21.606894 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c2830318-9f0e-4406-a86b-0622bd55b65b-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7cd7857c4f-5pfgk\" (UID: \"c2830318-9f0e-4406-a86b-0622bd55b65b\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-5pfgk" Dec 08 21:30:21 crc kubenswrapper[4791]: I1208 21:30:21.607025 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/df480993-0603-450c-9cec-1e3f5472e67a-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7cd7857c4f-w777c\" (UID: \"df480993-0603-450c-9cec-1e3f5472e67a\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-w777c" Dec 08 21:30:21 crc kubenswrapper[4791]: I1208 21:30:21.607095 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/df480993-0603-450c-9cec-1e3f5472e67a-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7cd7857c4f-w777c\" (UID: \"df480993-0603-450c-9cec-1e3f5472e67a\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-w777c" Dec 08 21:30:21 crc kubenswrapper[4791]: I1208 21:30:21.607118 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c2830318-9f0e-4406-a86b-0622bd55b65b-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7cd7857c4f-5pfgk\" (UID: \"c2830318-9f0e-4406-a86b-0622bd55b65b\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-5pfgk" Dec 08 21:30:21 crc kubenswrapper[4791]: I1208 21:30:21.648578 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-8ckfk" Dec 08 21:30:21 crc kubenswrapper[4791]: I1208 21:30:21.708181 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c2830318-9f0e-4406-a86b-0622bd55b65b-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7cd7857c4f-5pfgk\" (UID: \"c2830318-9f0e-4406-a86b-0622bd55b65b\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-5pfgk" Dec 08 21:30:21 crc kubenswrapper[4791]: I1208 21:30:21.708271 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/df480993-0603-450c-9cec-1e3f5472e67a-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7cd7857c4f-w777c\" (UID: \"df480993-0603-450c-9cec-1e3f5472e67a\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-w777c" Dec 08 21:30:21 crc kubenswrapper[4791]: I1208 21:30:21.708317 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/df480993-0603-450c-9cec-1e3f5472e67a-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7cd7857c4f-w777c\" (UID: \"df480993-0603-450c-9cec-1e3f5472e67a\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-w777c" Dec 08 21:30:21 crc kubenswrapper[4791]: I1208 21:30:21.708346 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c2830318-9f0e-4406-a86b-0622bd55b65b-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7cd7857c4f-5pfgk\" (UID: \"c2830318-9f0e-4406-a86b-0622bd55b65b\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-5pfgk" Dec 08 21:30:21 crc kubenswrapper[4791]: I1208 21:30:21.716228 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/df480993-0603-450c-9cec-1e3f5472e67a-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7cd7857c4f-w777c\" (UID: \"df480993-0603-450c-9cec-1e3f5472e67a\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-w777c" Dec 08 21:30:21 crc kubenswrapper[4791]: I1208 21:30:21.716644 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c2830318-9f0e-4406-a86b-0622bd55b65b-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7cd7857c4f-5pfgk\" (UID: \"c2830318-9f0e-4406-a86b-0622bd55b65b\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-5pfgk" Dec 08 21:30:21 crc kubenswrapper[4791]: I1208 21:30:21.740491 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c2830318-9f0e-4406-a86b-0622bd55b65b-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7cd7857c4f-5pfgk\" (UID: \"c2830318-9f0e-4406-a86b-0622bd55b65b\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-5pfgk" Dec 08 21:30:21 crc kubenswrapper[4791]: I1208 21:30:21.740517 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/df480993-0603-450c-9cec-1e3f5472e67a-apiservice-cert\") pod 
\"obo-prometheus-operator-admission-webhook-7cd7857c4f-w777c\" (UID: \"df480993-0603-450c-9cec-1e3f5472e67a\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-w777c" Dec 08 21:30:21 crc kubenswrapper[4791]: I1208 21:30:21.748947 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-m6mjp"] Dec 08 21:30:21 crc kubenswrapper[4791]: I1208 21:30:21.749757 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-m6mjp" Dec 08 21:30:21 crc kubenswrapper[4791]: I1208 21:30:21.754501 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-c65ff" Dec 08 21:30:21 crc kubenswrapper[4791]: I1208 21:30:21.754702 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Dec 08 21:30:21 crc kubenswrapper[4791]: I1208 21:30:21.809450 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nwkhs\" (UniqueName: \"kubernetes.io/projected/e908ab9f-9726-406c-afae-77d716c404e3-kube-api-access-nwkhs\") pod \"observability-operator-d8bb48f5d-m6mjp\" (UID: \"e908ab9f-9726-406c-afae-77d716c404e3\") " pod="openshift-operators/observability-operator-d8bb48f5d-m6mjp" Dec 08 21:30:21 crc kubenswrapper[4791]: I1208 21:30:21.809543 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/e908ab9f-9726-406c-afae-77d716c404e3-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-m6mjp\" (UID: \"e908ab9f-9726-406c-afae-77d716c404e3\") " pod="openshift-operators/observability-operator-d8bb48f5d-m6mjp" Dec 08 21:30:21 crc kubenswrapper[4791]: I1208 21:30:21.813176 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-w777c" Dec 08 21:30:21 crc kubenswrapper[4791]: I1208 21:30:21.860049 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-5pfgk" Dec 08 21:30:21 crc kubenswrapper[4791]: I1208 21:30:21.910584 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nwkhs\" (UniqueName: \"kubernetes.io/projected/e908ab9f-9726-406c-afae-77d716c404e3-kube-api-access-nwkhs\") pod \"observability-operator-d8bb48f5d-m6mjp\" (UID: \"e908ab9f-9726-406c-afae-77d716c404e3\") " pod="openshift-operators/observability-operator-d8bb48f5d-m6mjp" Dec 08 21:30:21 crc kubenswrapper[4791]: I1208 21:30:21.910672 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/e908ab9f-9726-406c-afae-77d716c404e3-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-m6mjp\" (UID: \"e908ab9f-9726-406c-afae-77d716c404e3\") " pod="openshift-operators/observability-operator-d8bb48f5d-m6mjp" Dec 08 21:30:21 crc kubenswrapper[4791]: I1208 21:30:21.915584 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/e908ab9f-9726-406c-afae-77d716c404e3-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-m6mjp\" (UID: \"e908ab9f-9726-406c-afae-77d716c404e3\") " pod="openshift-operators/observability-operator-d8bb48f5d-m6mjp" Dec 08 21:30:21 crc kubenswrapper[4791]: I1208 21:30:21.922189 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-5446b9c989-cb4nk"] Dec 08 21:30:21 crc kubenswrapper[4791]: I1208 21:30:21.928182 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-cb4nk" Dec 08 21:30:21 crc kubenswrapper[4791]: I1208 21:30:21.929262 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nwkhs\" (UniqueName: \"kubernetes.io/projected/e908ab9f-9726-406c-afae-77d716c404e3-kube-api-access-nwkhs\") pod \"observability-operator-d8bb48f5d-m6mjp\" (UID: \"e908ab9f-9726-406c-afae-77d716c404e3\") " pod="openshift-operators/observability-operator-d8bb48f5d-m6mjp" Dec 08 21:30:21 crc kubenswrapper[4791]: I1208 21:30:21.932033 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-gffkk" Dec 08 21:30:22 crc kubenswrapper[4791]: I1208 21:30:22.012084 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/33a18e1e-100a-4419-9119-7de245332906-openshift-service-ca\") pod \"perses-operator-5446b9c989-cb4nk\" (UID: \"33a18e1e-100a-4419-9119-7de245332906\") " pod="openshift-operators/perses-operator-5446b9c989-cb4nk" Dec 08 21:30:22 crc kubenswrapper[4791]: I1208 21:30:22.012148 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sqfd9\" (UniqueName: \"kubernetes.io/projected/33a18e1e-100a-4419-9119-7de245332906-kube-api-access-sqfd9\") pod \"perses-operator-5446b9c989-cb4nk\" (UID: \"33a18e1e-100a-4419-9119-7de245332906\") " pod="openshift-operators/perses-operator-5446b9c989-cb4nk" Dec 08 21:30:22 crc kubenswrapper[4791]: I1208 21:30:22.076829 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-m6mjp" Dec 08 21:30:22 crc kubenswrapper[4791]: I1208 21:30:22.114198 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sqfd9\" (UniqueName: \"kubernetes.io/projected/33a18e1e-100a-4419-9119-7de245332906-kube-api-access-sqfd9\") pod \"perses-operator-5446b9c989-cb4nk\" (UID: \"33a18e1e-100a-4419-9119-7de245332906\") " pod="openshift-operators/perses-operator-5446b9c989-cb4nk" Dec 08 21:30:22 crc kubenswrapper[4791]: I1208 21:30:22.114773 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/33a18e1e-100a-4419-9119-7de245332906-openshift-service-ca\") pod \"perses-operator-5446b9c989-cb4nk\" (UID: \"33a18e1e-100a-4419-9119-7de245332906\") " pod="openshift-operators/perses-operator-5446b9c989-cb4nk" Dec 08 21:30:22 crc kubenswrapper[4791]: I1208 21:30:22.116430 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/33a18e1e-100a-4419-9119-7de245332906-openshift-service-ca\") pod \"perses-operator-5446b9c989-cb4nk\" (UID: \"33a18e1e-100a-4419-9119-7de245332906\") " pod="openshift-operators/perses-operator-5446b9c989-cb4nk" Dec 08 21:30:22 crc kubenswrapper[4791]: I1208 21:30:22.174303 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sqfd9\" (UniqueName: \"kubernetes.io/projected/33a18e1e-100a-4419-9119-7de245332906-kube-api-access-sqfd9\") pod \"perses-operator-5446b9c989-cb4nk\" (UID: \"33a18e1e-100a-4419-9119-7de245332906\") " pod="openshift-operators/perses-operator-5446b9c989-cb4nk" Dec 08 21:30:22 crc kubenswrapper[4791]: I1208 21:30:22.297381 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-cb4nk" Dec 08 21:30:26 crc kubenswrapper[4791]: E1208 21:30:26.225474 4791 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 08 21:30:26 crc kubenswrapper[4791]: E1208 21:30:26.226204 4791 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4rmhm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-578fb_openshift-marketplace(46942f67-6d8f-4500-80c6-81c8d07c6fe5): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 08 21:30:26 crc kubenswrapper[4791]: E1208 21:30:26.227408 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-578fb" podUID="46942f67-6d8f-4500-80c6-81c8d07c6fe5" Dec 08 21:30:26 crc kubenswrapper[4791]: E1208 21:30:26.256249 4791 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7cd7857c4f-5pfgk_openshift-operators_c2830318-9f0e-4406-a86b-0622bd55b65b_0(6047dfc8e51bb62a7365014b27691d2dbab4fc5e28f4f74361f522e9b05748cc): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Dec 08 21:30:26 crc kubenswrapper[4791]: E1208 21:30:26.256341 4791 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7cd7857c4f-5pfgk_openshift-operators_c2830318-9f0e-4406-a86b-0622bd55b65b_0(6047dfc8e51bb62a7365014b27691d2dbab4fc5e28f4f74361f522e9b05748cc): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-5pfgk" Dec 08 21:30:26 crc kubenswrapper[4791]: E1208 21:30:26.256367 4791 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7cd7857c4f-5pfgk_openshift-operators_c2830318-9f0e-4406-a86b-0622bd55b65b_0(6047dfc8e51bb62a7365014b27691d2dbab4fc5e28f4f74361f522e9b05748cc): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-5pfgk" Dec 08 21:30:26 crc kubenswrapper[4791]: E1208 21:30:26.256426 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-7cd7857c4f-5pfgk_openshift-operators(c2830318-9f0e-4406-a86b-0622bd55b65b)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-7cd7857c4f-5pfgk_openshift-operators(c2830318-9f0e-4406-a86b-0622bd55b65b)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7cd7857c4f-5pfgk_openshift-operators_c2830318-9f0e-4406-a86b-0622bd55b65b_0(6047dfc8e51bb62a7365014b27691d2dbab4fc5e28f4f74361f522e9b05748cc): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-5pfgk" podUID="c2830318-9f0e-4406-a86b-0622bd55b65b" Dec 08 21:30:26 crc kubenswrapper[4791]: E1208 21:30:26.264913 4791 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-8ckfk_openshift-operators_5bf6b4bb-0cd5-4461-b351-def18dd64e8c_0(5c16fcff0a6fa1d9fd3cecccdadb064a817a84f56843cbd48ceebfba8e1a0735): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 08 21:30:26 crc kubenswrapper[4791]: E1208 21:30:26.264988 4791 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-8ckfk_openshift-operators_5bf6b4bb-0cd5-4461-b351-def18dd64e8c_0(5c16fcff0a6fa1d9fd3cecccdadb064a817a84f56843cbd48ceebfba8e1a0735): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-8ckfk" Dec 08 21:30:26 crc kubenswrapper[4791]: E1208 21:30:26.265015 4791 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-8ckfk_openshift-operators_5bf6b4bb-0cd5-4461-b351-def18dd64e8c_0(5c16fcff0a6fa1d9fd3cecccdadb064a817a84f56843cbd48ceebfba8e1a0735): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-8ckfk" Dec 08 21:30:26 crc kubenswrapper[4791]: E1208 21:30:26.265067 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-668cf9dfbb-8ckfk_openshift-operators(5bf6b4bb-0cd5-4461-b351-def18dd64e8c)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-668cf9dfbb-8ckfk_openshift-operators(5bf6b4bb-0cd5-4461-b351-def18dd64e8c)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-8ckfk_openshift-operators_5bf6b4bb-0cd5-4461-b351-def18dd64e8c_0(5c16fcff0a6fa1d9fd3cecccdadb064a817a84f56843cbd48ceebfba8e1a0735): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-8ckfk" podUID="5bf6b4bb-0cd5-4461-b351-def18dd64e8c" Dec 08 21:30:26 crc kubenswrapper[4791]: E1208 21:30:26.279008 4791 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-m6mjp_openshift-operators_e908ab9f-9726-406c-afae-77d716c404e3_0(220643159c52505235f353f97e1a8601ca2de78cb2ec246f7f9b8bec3b56a166): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 08 21:30:26 crc kubenswrapper[4791]: E1208 21:30:26.279080 4791 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-m6mjp_openshift-operators_e908ab9f-9726-406c-afae-77d716c404e3_0(220643159c52505235f353f97e1a8601ca2de78cb2ec246f7f9b8bec3b56a166): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-d8bb48f5d-m6mjp" Dec 08 21:30:26 crc kubenswrapper[4791]: E1208 21:30:26.279105 4791 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-m6mjp_openshift-operators_e908ab9f-9726-406c-afae-77d716c404e3_0(220643159c52505235f353f97e1a8601ca2de78cb2ec246f7f9b8bec3b56a166): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-d8bb48f5d-m6mjp" Dec 08 21:30:26 crc kubenswrapper[4791]: E1208 21:30:26.279154 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"observability-operator-d8bb48f5d-m6mjp_openshift-operators(e908ab9f-9726-406c-afae-77d716c404e3)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"observability-operator-d8bb48f5d-m6mjp_openshift-operators(e908ab9f-9726-406c-afae-77d716c404e3)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-m6mjp_openshift-operators_e908ab9f-9726-406c-afae-77d716c404e3_0(220643159c52505235f353f97e1a8601ca2de78cb2ec246f7f9b8bec3b56a166): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-operators/observability-operator-d8bb48f5d-m6mjp" podUID="e908ab9f-9726-406c-afae-77d716c404e3" Dec 08 21:30:26 crc kubenswrapper[4791]: E1208 21:30:26.295933 4791 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7cd7857c4f-w777c_openshift-operators_df480993-0603-450c-9cec-1e3f5472e67a_0(cf29e9d33a1be0ee4cd3803c6428c6c537baccda1afc8e5de74a872089afa515): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 08 21:30:26 crc kubenswrapper[4791]: E1208 21:30:26.296018 4791 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7cd7857c4f-w777c_openshift-operators_df480993-0603-450c-9cec-1e3f5472e67a_0(cf29e9d33a1be0ee4cd3803c6428c6c537baccda1afc8e5de74a872089afa515): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-w777c" Dec 08 21:30:26 crc kubenswrapper[4791]: E1208 21:30:26.296064 4791 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7cd7857c4f-w777c_openshift-operators_df480993-0603-450c-9cec-1e3f5472e67a_0(cf29e9d33a1be0ee4cd3803c6428c6c537baccda1afc8e5de74a872089afa515): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-w777c" Dec 08 21:30:26 crc kubenswrapper[4791]: E1208 21:30:26.296106 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-7cd7857c4f-w777c_openshift-operators(df480993-0603-450c-9cec-1e3f5472e67a)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-7cd7857c4f-w777c_openshift-operators(df480993-0603-450c-9cec-1e3f5472e67a)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7cd7857c4f-w777c_openshift-operators_df480993-0603-450c-9cec-1e3f5472e67a_0(cf29e9d33a1be0ee4cd3803c6428c6c537baccda1afc8e5de74a872089afa515): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-w777c" podUID="df480993-0603-450c-9cec-1e3f5472e67a" Dec 08 21:30:26 crc kubenswrapper[4791]: E1208 21:30:26.318389 4791 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-cb4nk_openshift-operators_33a18e1e-100a-4419-9119-7de245332906_0(242b5ae956296ec8f26807d0f7da412fa7f9b22dd2b74f43f8a563aa84bded72): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Dec 08 21:30:26 crc kubenswrapper[4791]: E1208 21:30:26.318479 4791 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-cb4nk_openshift-operators_33a18e1e-100a-4419-9119-7de245332906_0(242b5ae956296ec8f26807d0f7da412fa7f9b22dd2b74f43f8a563aa84bded72): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-5446b9c989-cb4nk" Dec 08 21:30:26 crc kubenswrapper[4791]: E1208 21:30:26.318510 4791 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-cb4nk_openshift-operators_33a18e1e-100a-4419-9119-7de245332906_0(242b5ae956296ec8f26807d0f7da412fa7f9b22dd2b74f43f8a563aa84bded72): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-5446b9c989-cb4nk" Dec 08 21:30:26 crc kubenswrapper[4791]: E1208 21:30:26.318575 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"perses-operator-5446b9c989-cb4nk_openshift-operators(33a18e1e-100a-4419-9119-7de245332906)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"perses-operator-5446b9c989-cb4nk_openshift-operators(33a18e1e-100a-4419-9119-7de245332906)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-cb4nk_openshift-operators_33a18e1e-100a-4419-9119-7de245332906_0(242b5ae956296ec8f26807d0f7da412fa7f9b22dd2b74f43f8a563aa84bded72): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/perses-operator-5446b9c989-cb4nk" podUID="33a18e1e-100a-4419-9119-7de245332906" Dec 08 21:30:26 crc kubenswrapper[4791]: I1208 21:30:26.487447 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" event={"ID":"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99","Type":"ContainerStarted","Data":"707410a5076f45a0c16635fabe53e86f3962c41f6fcc79c5cdf6f7e601ee2f83"} Dec 08 21:30:26 crc kubenswrapper[4791]: E1208 21:30:26.489471 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-578fb" podUID="46942f67-6d8f-4500-80c6-81c8d07c6fe5" Dec 08 21:30:27 crc kubenswrapper[4791]: I1208 21:30:27.496280 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" event={"ID":"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99","Type":"ContainerStarted","Data":"262dffc2e67ba6a4b5e5bd6ae54ad324075ffe36cca0f05a8aeb95d46770fb21"} Dec 08 21:30:27 crc kubenswrapper[4791]: I1208 21:30:27.496548 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" event={"ID":"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99","Type":"ContainerStarted","Data":"1b2b0e1c2d0bcc50a545a6cbb1e8a32e662313388a4079bae44c05333b88d49f"} Dec 08 21:30:27 crc kubenswrapper[4791]: I1208 21:30:27.496558 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" 
event={"ID":"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99","Type":"ContainerStarted","Data":"1d2507602955ab7bd95d083480dfc7ef4a30ab52319cb62d3961096f7f5db801"} Dec 08 21:30:27 crc kubenswrapper[4791]: I1208 21:30:27.496567 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" event={"ID":"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99","Type":"ContainerStarted","Data":"ab75a77998b2cade8e7e4aee9fb2e18fb5e1cc67d8cdbdba2318c1fea3655d0a"} Dec 08 21:30:27 crc kubenswrapper[4791]: I1208 21:30:27.496575 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" event={"ID":"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99","Type":"ContainerStarted","Data":"7a838a984794ce0948bfbee9e8b885cffedc47e21378b8bbc1ec1e3445825d43"} Dec 08 21:30:30 crc kubenswrapper[4791]: I1208 21:30:30.516833 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" event={"ID":"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99","Type":"ContainerStarted","Data":"5d62556e5ae83aaa2fdf50cd15b94c328cc0cb76f7ac191b5f8a0b58d37e842e"} Dec 08 21:30:32 crc kubenswrapper[4791]: I1208 21:30:32.542882 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" event={"ID":"e5ffc71d-6aa4-4311-a4eb-5e664e3f9b99","Type":"ContainerStarted","Data":"13f52810e8df4249e244894aff2982590f6f303e0c7fbc05c995a70582e06f68"} Dec 08 21:30:32 crc kubenswrapper[4791]: I1208 21:30:32.543342 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:32 crc kubenswrapper[4791]: I1208 21:30:32.543399 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:32 crc kubenswrapper[4791]: I1208 21:30:32.543425 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:32 crc kubenswrapper[4791]: I1208 21:30:32.587293 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" podStartSLOduration=24.587254672 podStartE2EDuration="24.587254672s" podCreationTimestamp="2025-12-08 21:30:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:30:32.58359905 +0000 UTC m=+709.282357405" watchObservedRunningTime="2025-12-08 21:30:32.587254672 +0000 UTC m=+709.286013017" Dec 08 21:30:32 crc kubenswrapper[4791]: I1208 21:30:32.594800 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:32 crc kubenswrapper[4791]: I1208 21:30:32.600522 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:33 crc kubenswrapper[4791]: I1208 21:30:33.867812 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-cb4nk"] Dec 08 21:30:33 crc kubenswrapper[4791]: I1208 21:30:33.867961 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-cb4nk" Dec 08 21:30:33 crc kubenswrapper[4791]: I1208 21:30:33.868512 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-cb4nk" Dec 08 21:30:33 crc kubenswrapper[4791]: I1208 21:30:33.896015 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-m6mjp"] Dec 08 21:30:33 crc kubenswrapper[4791]: I1208 21:30:33.896292 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-m6mjp" Dec 08 21:30:33 crc kubenswrapper[4791]: I1208 21:30:33.896853 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-m6mjp" Dec 08 21:30:33 crc kubenswrapper[4791]: I1208 21:30:33.900657 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-8ckfk"] Dec 08 21:30:33 crc kubenswrapper[4791]: I1208 21:30:33.900808 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-8ckfk" Dec 08 21:30:33 crc kubenswrapper[4791]: I1208 21:30:33.901297 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-8ckfk" Dec 08 21:30:33 crc kubenswrapper[4791]: I1208 21:30:33.911052 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-5pfgk"] Dec 08 21:30:33 crc kubenswrapper[4791]: I1208 21:30:33.911209 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-5pfgk" Dec 08 21:30:33 crc kubenswrapper[4791]: I1208 21:30:33.911676 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-5pfgk" Dec 08 21:30:33 crc kubenswrapper[4791]: I1208 21:30:33.920674 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-w777c"] Dec 08 21:30:33 crc kubenswrapper[4791]: I1208 21:30:33.920817 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-w777c" Dec 08 21:30:33 crc kubenswrapper[4791]: I1208 21:30:33.921285 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-w777c" Dec 08 21:30:33 crc kubenswrapper[4791]: E1208 21:30:33.924341 4791 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-cb4nk_openshift-operators_33a18e1e-100a-4419-9119-7de245332906_0(4cb511d5bd3939096ff09a09ec1fdc884e472e653fee3efb8ded3b90d5649c94): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 08 21:30:33 crc kubenswrapper[4791]: E1208 21:30:33.924419 4791 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-cb4nk_openshift-operators_33a18e1e-100a-4419-9119-7de245332906_0(4cb511d5bd3939096ff09a09ec1fdc884e472e653fee3efb8ded3b90d5649c94): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/perses-operator-5446b9c989-cb4nk" Dec 08 21:30:33 crc kubenswrapper[4791]: E1208 21:30:33.924452 4791 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-cb4nk_openshift-operators_33a18e1e-100a-4419-9119-7de245332906_0(4cb511d5bd3939096ff09a09ec1fdc884e472e653fee3efb8ded3b90d5649c94): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-5446b9c989-cb4nk" Dec 08 21:30:33 crc kubenswrapper[4791]: E1208 21:30:33.924503 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"perses-operator-5446b9c989-cb4nk_openshift-operators(33a18e1e-100a-4419-9119-7de245332906)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"perses-operator-5446b9c989-cb4nk_openshift-operators(33a18e1e-100a-4419-9119-7de245332906)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-cb4nk_openshift-operators_33a18e1e-100a-4419-9119-7de245332906_0(4cb511d5bd3939096ff09a09ec1fdc884e472e653fee3efb8ded3b90d5649c94): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/perses-operator-5446b9c989-cb4nk" podUID="33a18e1e-100a-4419-9119-7de245332906" Dec 08 21:30:34 crc kubenswrapper[4791]: E1208 21:30:34.002199 4791 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-m6mjp_openshift-operators_e908ab9f-9726-406c-afae-77d716c404e3_0(938d177a02bf46e464e83381dbd36440883ea9cbafa1d1615915d2a4da5a0d12): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 08 21:30:34 crc kubenswrapper[4791]: E1208 21:30:34.002321 4791 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-m6mjp_openshift-operators_e908ab9f-9726-406c-afae-77d716c404e3_0(938d177a02bf46e464e83381dbd36440883ea9cbafa1d1615915d2a4da5a0d12): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-d8bb48f5d-m6mjp" Dec 08 21:30:34 crc kubenswrapper[4791]: E1208 21:30:34.002355 4791 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-m6mjp_openshift-operators_e908ab9f-9726-406c-afae-77d716c404e3_0(938d177a02bf46e464e83381dbd36440883ea9cbafa1d1615915d2a4da5a0d12): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/observability-operator-d8bb48f5d-m6mjp" Dec 08 21:30:34 crc kubenswrapper[4791]: E1208 21:30:34.002442 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"observability-operator-d8bb48f5d-m6mjp_openshift-operators(e908ab9f-9726-406c-afae-77d716c404e3)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"observability-operator-d8bb48f5d-m6mjp_openshift-operators(e908ab9f-9726-406c-afae-77d716c404e3)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-m6mjp_openshift-operators_e908ab9f-9726-406c-afae-77d716c404e3_0(938d177a02bf46e464e83381dbd36440883ea9cbafa1d1615915d2a4da5a0d12): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/observability-operator-d8bb48f5d-m6mjp" podUID="e908ab9f-9726-406c-afae-77d716c404e3" Dec 08 21:30:34 crc kubenswrapper[4791]: E1208 21:30:34.002794 4791 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-8ckfk_openshift-operators_5bf6b4bb-0cd5-4461-b351-def18dd64e8c_0(18932a2f3ce1f30793cbc43251167f4a95ae578ce38cf05454cb1b9960877070): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 08 21:30:34 crc kubenswrapper[4791]: E1208 21:30:34.002824 4791 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-8ckfk_openshift-operators_5bf6b4bb-0cd5-4461-b351-def18dd64e8c_0(18932a2f3ce1f30793cbc43251167f4a95ae578ce38cf05454cb1b9960877070): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-8ckfk" Dec 08 21:30:34 crc kubenswrapper[4791]: E1208 21:30:34.002872 4791 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-8ckfk_openshift-operators_5bf6b4bb-0cd5-4461-b351-def18dd64e8c_0(18932a2f3ce1f30793cbc43251167f4a95ae578ce38cf05454cb1b9960877070): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-8ckfk" Dec 08 21:30:34 crc kubenswrapper[4791]: E1208 21:30:34.002908 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-668cf9dfbb-8ckfk_openshift-operators(5bf6b4bb-0cd5-4461-b351-def18dd64e8c)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-668cf9dfbb-8ckfk_openshift-operators(5bf6b4bb-0cd5-4461-b351-def18dd64e8c)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-8ckfk_openshift-operators_5bf6b4bb-0cd5-4461-b351-def18dd64e8c_0(18932a2f3ce1f30793cbc43251167f4a95ae578ce38cf05454cb1b9960877070): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-8ckfk" podUID="5bf6b4bb-0cd5-4461-b351-def18dd64e8c" Dec 08 21:30:34 crc kubenswrapper[4791]: E1208 21:30:34.018354 4791 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7cd7857c4f-5pfgk_openshift-operators_c2830318-9f0e-4406-a86b-0622bd55b65b_0(9efddf3ca979e8aacd3adf7cf9fc6488f31b8c47e6c1b0de85adc752dad1d9c8): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 08 21:30:34 crc kubenswrapper[4791]: E1208 21:30:34.018434 4791 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7cd7857c4f-5pfgk_openshift-operators_c2830318-9f0e-4406-a86b-0622bd55b65b_0(9efddf3ca979e8aacd3adf7cf9fc6488f31b8c47e6c1b0de85adc752dad1d9c8): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-5pfgk" Dec 08 21:30:34 crc kubenswrapper[4791]: E1208 21:30:34.018462 4791 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7cd7857c4f-5pfgk_openshift-operators_c2830318-9f0e-4406-a86b-0622bd55b65b_0(9efddf3ca979e8aacd3adf7cf9fc6488f31b8c47e6c1b0de85adc752dad1d9c8): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-5pfgk" Dec 08 21:30:34 crc kubenswrapper[4791]: E1208 21:30:34.018515 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-7cd7857c4f-5pfgk_openshift-operators(c2830318-9f0e-4406-a86b-0622bd55b65b)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-7cd7857c4f-5pfgk_openshift-operators(c2830318-9f0e-4406-a86b-0622bd55b65b)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7cd7857c4f-5pfgk_openshift-operators_c2830318-9f0e-4406-a86b-0622bd55b65b_0(9efddf3ca979e8aacd3adf7cf9fc6488f31b8c47e6c1b0de85adc752dad1d9c8): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-5pfgk" podUID="c2830318-9f0e-4406-a86b-0622bd55b65b" Dec 08 21:30:34 crc kubenswrapper[4791]: E1208 21:30:34.025825 4791 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7cd7857c4f-w777c_openshift-operators_df480993-0603-450c-9cec-1e3f5472e67a_0(ba8f09cf4d8cb9352ed95a806a390804c21742dc46e0e981768ddc95d49a08cb): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Dec 08 21:30:34 crc kubenswrapper[4791]: E1208 21:30:34.025955 4791 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7cd7857c4f-w777c_openshift-operators_df480993-0603-450c-9cec-1e3f5472e67a_0(ba8f09cf4d8cb9352ed95a806a390804c21742dc46e0e981768ddc95d49a08cb): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-w777c" Dec 08 21:30:34 crc kubenswrapper[4791]: E1208 21:30:34.026201 4791 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7cd7857c4f-w777c_openshift-operators_df480993-0603-450c-9cec-1e3f5472e67a_0(ba8f09cf4d8cb9352ed95a806a390804c21742dc46e0e981768ddc95d49a08cb): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-w777c" Dec 08 21:30:34 crc kubenswrapper[4791]: E1208 21:30:34.026295 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-7cd7857c4f-w777c_openshift-operators(df480993-0603-450c-9cec-1e3f5472e67a)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-7cd7857c4f-w777c_openshift-operators(df480993-0603-450c-9cec-1e3f5472e67a)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7cd7857c4f-w777c_openshift-operators_df480993-0603-450c-9cec-1e3f5472e67a_0(ba8f09cf4d8cb9352ed95a806a390804c21742dc46e0e981768ddc95d49a08cb): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-w777c" podUID="df480993-0603-450c-9cec-1e3f5472e67a" Dec 08 21:30:38 crc kubenswrapper[4791]: I1208 21:30:38.647511 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-l5qqv" Dec 08 21:30:43 crc kubenswrapper[4791]: I1208 21:30:43.624092 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-578fb" event={"ID":"46942f67-6d8f-4500-80c6-81c8d07c6fe5","Type":"ContainerStarted","Data":"476ab4d83617a0bd5de1bb51023f7f931e59d0682cdfd12219b429a12463d129"} Dec 08 21:30:43 crc kubenswrapper[4791]: I1208 21:30:43.946329 4791 scope.go:117] "RemoveContainer" containerID="37c2a697912d045e363b17699528d1f17acd50d3a0883e40b9023d72f3cf5e56" Dec 08 21:30:43 crc kubenswrapper[4791]: I1208 21:30:43.964647 4791 scope.go:117] "RemoveContainer" containerID="1ea79a956f5bc31b744c699f976d260048fa6d0c69e5d96cc3e832c697a796ce" Dec 08 21:30:43 crc kubenswrapper[4791]: I1208 21:30:43.980779 4791 scope.go:117] "RemoveContainer" containerID="1fe5fcb37182b9302ac277ae161589f867ee9126114052e88a82048e319595c6" Dec 08 21:30:44 crc kubenswrapper[4791]: I1208 21:30:44.003258 4791 scope.go:117] "RemoveContainer" containerID="92ac0f9db30045f8ac6e2997ecb935634f31f5ee0cbb2da74e87c344df3ed125" Dec 08 21:30:44 crc kubenswrapper[4791]: I1208 21:30:44.041364 4791 scope.go:117] "RemoveContainer" containerID="3aa9dedf617d8d5c7f71b58eb379feceb649024dfd1ba3711088afbb5b873936" Dec 08 21:30:44 crc kubenswrapper[4791]: I1208 21:30:44.058870 4791 scope.go:117] "RemoveContainer" containerID="edf0bc928bbf2c2486d9351ffca88f382b57055225c9ddb099d15cc38fc12462" Dec 08 21:30:44 crc kubenswrapper[4791]: I1208 21:30:44.081541 4791 scope.go:117] "RemoveContainer" containerID="7938544ef5c904b0a752cb91848ba48897df503bee8c19a431137328620c351e" Dec 08 21:30:44 crc kubenswrapper[4791]: I1208 21:30:44.097598 4791 scope.go:117] "RemoveContainer" containerID="32b83e290f40edffd1d7679ef280a32bff978b35e9b0fcd1f179d3707021ba3a" Dec 08 21:30:44 crc kubenswrapper[4791]: I1208 21:30:44.142493 4791 scope.go:117] "RemoveContainer" containerID="22cca113e278abf9667c373147d1bf61b951ae07e7e1046e13ebf28aa0355007" Dec 08 21:30:44 crc kubenswrapper[4791]: I1208 21:30:44.597766 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-m6mjp" Dec 08 21:30:44 crc kubenswrapper[4791]: I1208 21:30:44.598569 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-m6mjp" Dec 08 21:30:44 crc kubenswrapper[4791]: I1208 21:30:44.632621 4791 generic.go:334] "Generic (PLEG): container finished" podID="46942f67-6d8f-4500-80c6-81c8d07c6fe5" containerID="476ab4d83617a0bd5de1bb51023f7f931e59d0682cdfd12219b429a12463d129" exitCode=0 Dec 08 21:30:44 crc kubenswrapper[4791]: I1208 21:30:44.632662 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-578fb" event={"ID":"46942f67-6d8f-4500-80c6-81c8d07c6fe5","Type":"ContainerDied","Data":"476ab4d83617a0bd5de1bb51023f7f931e59d0682cdfd12219b429a12463d129"} Dec 08 21:30:44 crc kubenswrapper[4791]: I1208 21:30:44.947400 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-m6mjp"] Dec 08 21:30:44 crc kubenswrapper[4791]: W1208 21:30:44.957075 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode908ab9f_9726_406c_afae_77d716c404e3.slice/crio-1f467ca6f8cd66291755a5daf78718a0640062fc26d99b85cfc02b491561813c WatchSource:0}: Error finding container 1f467ca6f8cd66291755a5daf78718a0640062fc26d99b85cfc02b491561813c: Status 404 returned error can't find the container with id 1f467ca6f8cd66291755a5daf78718a0640062fc26d99b85cfc02b491561813c Dec 08 21:30:45 crc kubenswrapper[4791]: I1208 21:30:45.597674 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-5pfgk" Dec 08 21:30:45 crc kubenswrapper[4791]: I1208 21:30:45.598657 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-5pfgk" Dec 08 21:30:45 crc kubenswrapper[4791]: I1208 21:30:45.651353 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-m6mjp" event={"ID":"e908ab9f-9726-406c-afae-77d716c404e3","Type":"ContainerStarted","Data":"1f467ca6f8cd66291755a5daf78718a0640062fc26d99b85cfc02b491561813c"} Dec 08 21:30:46 crc kubenswrapper[4791]: I1208 21:30:46.023336 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-5pfgk"] Dec 08 21:30:46 crc kubenswrapper[4791]: W1208 21:30:46.033459 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc2830318_9f0e_4406_a86b_0622bd55b65b.slice/crio-fea9d3e6479551ed0a8c73798b048402578ee1c46d471f8eee0aa70e790bb00d WatchSource:0}: Error finding container fea9d3e6479551ed0a8c73798b048402578ee1c46d471f8eee0aa70e790bb00d: Status 404 returned error can't find the container with id fea9d3e6479551ed0a8c73798b048402578ee1c46d471f8eee0aa70e790bb00d Dec 08 21:30:46 crc kubenswrapper[4791]: I1208 21:30:46.661162 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-578fb" event={"ID":"46942f67-6d8f-4500-80c6-81c8d07c6fe5","Type":"ContainerStarted","Data":"08d1f9faaf9f281be124774e69e2bf7908a14471b9d6dd845264e5a3c962736d"} Dec 08 21:30:46 crc kubenswrapper[4791]: I1208 21:30:46.663576 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-5pfgk" 
event={"ID":"c2830318-9f0e-4406-a86b-0622bd55b65b","Type":"ContainerStarted","Data":"fea9d3e6479551ed0a8c73798b048402578ee1c46d471f8eee0aa70e790bb00d"} Dec 08 21:30:46 crc kubenswrapper[4791]: I1208 21:30:46.684759 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-578fb" podStartSLOduration=2.9601735099999997 podStartE2EDuration="44.684740332s" podCreationTimestamp="2025-12-08 21:30:02 +0000 UTC" firstStartedPulling="2025-12-08 21:30:03.850960424 +0000 UTC m=+680.549718769" lastFinishedPulling="2025-12-08 21:30:45.575527246 +0000 UTC m=+722.274285591" observedRunningTime="2025-12-08 21:30:46.678511888 +0000 UTC m=+723.377270233" watchObservedRunningTime="2025-12-08 21:30:46.684740332 +0000 UTC m=+723.383498677" Dec 08 21:30:47 crc kubenswrapper[4791]: I1208 21:30:47.598000 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-cb4nk" Dec 08 21:30:47 crc kubenswrapper[4791]: I1208 21:30:47.597992 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-w777c" Dec 08 21:30:47 crc kubenswrapper[4791]: I1208 21:30:47.598595 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-cb4nk" Dec 08 21:30:47 crc kubenswrapper[4791]: I1208 21:30:47.599057 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-w777c" Dec 08 21:30:48 crc kubenswrapper[4791]: I1208 21:30:48.432439 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-cb4nk"] Dec 08 21:30:48 crc kubenswrapper[4791]: W1208 21:30:48.441115 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod33a18e1e_100a_4419_9119_7de245332906.slice/crio-b97ad7e5c1072938322d24a5a3680c458508d590bbf91961a0538a42aced5d9f WatchSource:0}: Error finding container b97ad7e5c1072938322d24a5a3680c458508d590bbf91961a0538a42aced5d9f: Status 404 returned error can't find the container with id b97ad7e5c1072938322d24a5a3680c458508d590bbf91961a0538a42aced5d9f Dec 08 21:30:48 crc kubenswrapper[4791]: I1208 21:30:48.546007 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-w777c"] Dec 08 21:30:48 crc kubenswrapper[4791]: W1208 21:30:48.554397 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddf480993_0603_450c_9cec_1e3f5472e67a.slice/crio-6c229d0bdb0fca999d3721e5ed02e6e9c60c94dc49a03fba5347bb1abc3c709e WatchSource:0}: Error finding container 6c229d0bdb0fca999d3721e5ed02e6e9c60c94dc49a03fba5347bb1abc3c709e: Status 404 returned error can't find the container with id 6c229d0bdb0fca999d3721e5ed02e6e9c60c94dc49a03fba5347bb1abc3c709e Dec 08 21:30:48 crc kubenswrapper[4791]: I1208 21:30:48.597597 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-8ckfk" Dec 08 21:30:48 crc kubenswrapper[4791]: I1208 21:30:48.598275 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-8ckfk" Dec 08 21:30:48 crc kubenswrapper[4791]: I1208 21:30:48.696623 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-cb4nk" event={"ID":"33a18e1e-100a-4419-9119-7de245332906","Type":"ContainerStarted","Data":"b97ad7e5c1072938322d24a5a3680c458508d590bbf91961a0538a42aced5d9f"} Dec 08 21:30:48 crc kubenswrapper[4791]: I1208 21:30:48.698912 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-w777c" event={"ID":"df480993-0603-450c-9cec-1e3f5472e67a","Type":"ContainerStarted","Data":"6c229d0bdb0fca999d3721e5ed02e6e9c60c94dc49a03fba5347bb1abc3c709e"} Dec 08 21:30:49 crc kubenswrapper[4791]: I1208 21:30:49.181096 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-8ckfk"] Dec 08 21:30:49 crc kubenswrapper[4791]: W1208 21:30:49.207901 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5bf6b4bb_0cd5_4461_b351_def18dd64e8c.slice/crio-dd70f38372c2d30b8d4d6d4bfa486dc753eb64dce18b9a2214456733fa3379fc WatchSource:0}: Error finding container dd70f38372c2d30b8d4d6d4bfa486dc753eb64dce18b9a2214456733fa3379fc: Status 404 returned error can't find the container with id dd70f38372c2d30b8d4d6d4bfa486dc753eb64dce18b9a2214456733fa3379fc Dec 08 21:30:49 crc kubenswrapper[4791]: I1208 21:30:49.711696 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-8ckfk" event={"ID":"5bf6b4bb-0cd5-4461-b351-def18dd64e8c","Type":"ContainerStarted","Data":"dd70f38372c2d30b8d4d6d4bfa486dc753eb64dce18b9a2214456733fa3379fc"} Dec 08 21:30:52 crc kubenswrapper[4791]: I1208 21:30:52.475024 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-578fb" Dec 08 21:30:52 crc kubenswrapper[4791]: I1208 21:30:52.475303 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-578fb" Dec 08 21:30:53 crc kubenswrapper[4791]: I1208 21:30:53.544600 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-578fb" podUID="46942f67-6d8f-4500-80c6-81c8d07c6fe5" containerName="registry-server" probeResult="failure" output=< Dec 08 21:30:53 crc kubenswrapper[4791]: timeout: failed to connect service ":50051" within 1s Dec 08 21:30:53 crc kubenswrapper[4791]: > Dec 08 21:31:01 crc kubenswrapper[4791]: E1208 21:31:01.896538 4791 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:ce7d2904f7b238aa37dfe74a0b76bf73629e7a14fa52bf54b0ecf030ca36f1bb" Dec 08 21:31:01 crc kubenswrapper[4791]: E1208 21:31:01.897511 4791 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:ce7d2904f7b238aa37dfe74a0b76bf73629e7a14fa52bf54b0ecf030ca36f1bb,Command:[],Args:[--namespace=$(NAMESPACE) --images=perses=$(RELATED_IMAGE_PERSES) --images=alertmanager=$(RELATED_IMAGE_ALERTMANAGER) --images=prometheus=$(RELATED_IMAGE_PROMETHEUS) 
--images=thanos=$(RELATED_IMAGE_THANOS) --images=ui-dashboards=$(RELATED_IMAGE_CONSOLE_DASHBOARDS_PLUGIN) --images=ui-distributed-tracing=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN) --images=ui-distributed-tracing-pf5=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF5) --images=ui-distributed-tracing-pf4=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF4) --images=ui-logging=$(RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN) --images=ui-logging-pf4=$(RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN_PF4) --images=ui-troubleshooting-panel=$(RELATED_IMAGE_CONSOLE_TROUBLESHOOTING_PANEL_PLUGIN) --images=ui-monitoring=$(RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN) --images=ui-monitoring-pf5=$(RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN_PF5) --images=korrel8r=$(RELATED_IMAGE_KORREL8R) --images=health-analyzer=$(RELATED_IMAGE_CLUSTER_HEALTH_ANALYZER) --openshift.enabled=true],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:RELATED_IMAGE_ALERTMANAGER,Value:registry.redhat.io/cluster-observability-operator/alertmanager-rhel9@sha256:e718854a7d6ca8accf0fa72db0eb902e46c44d747ad51dc3f06bba0cefaa3c01,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PROMETHEUS,Value:registry.redhat.io/cluster-observability-operator/prometheus-rhel9@sha256:17ea20be390a94ab39f5cdd7f0cbc2498046eebcf77fe3dec9aa288d5c2cf46b,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_THANOS,Value:registry.redhat.io/cluster-observability-operator/thanos-rhel9@sha256:d972f4faa5e9c121402d23ed85002f26af48ec36b1b71a7489d677b3913d08b4,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PERSES,Value:registry.redhat.io/cluster-observability-operator/perses-rhel9@sha256:91531137fc1dcd740e277e0f65e120a0176a16f788c14c27925b61aa0b792ade,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DASHBOARDS_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/dashboards-console-plugin-rhel9@sha256:a69da8bbca8a28dd2925f864d51cc31cf761b10532c553095ba40b242ef701cb,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-rhel9@sha256:897e1bfad1187062725b54d87107bd0155972257a50d8335dd29e1999b828a4f,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF5,Value:registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-pf5-rhel9@sha256:95fe5b5746ca8c07ac9217ce2d8ac8e6afad17af210f9d8e0074df1310b209a8,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF4,Value:registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-pf4-rhel9@sha256:e9d9a89e4d8126a62b1852055482258ee528cac6398dd5d43ebad75ace0f33c9,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/logging-console-plugin-rhel9@sha256:ec684a0645ceb917b019af7ddba68c3533416e356ab0d0320a30e75ca7ebb31b,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN_PF4,Value:registry.redhat.io/cluster-observability-operator/logging-console-plugin-pf4-rhel9@sha256:3b9693fcde9b3a9494fb04735b1f7cfd0426f10be820fdc3f024175c0d3df1c9,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_TROUBLESHOOTING_PANEL_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/troubleshooting-panel-console-plugin-rhel9@sha256:580606f194180accc8abba099e17a26dca7522ec6d233fa2fdd40312771703e3,Va
lueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/monitoring-console-plugin-rhel9@sha256:e03777be39e71701935059cd877603874a13ac94daa73219d4e5e545599d78a9,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN_PF5,Value:registry.redhat.io/cluster-observability-operator/monitoring-console-plugin-pf5-rhel9@sha256:aa47256193cfd2877853878e1ae97d2ab8b8e5deae62b387cbfad02b284d379c,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KORREL8R,Value:registry.redhat.io/cluster-observability-operator/korrel8r-rhel9@sha256:c595ff56b2cb85514bf4784db6ddb82e4e657e3e708a7fb695fc4997379a94d4,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CLUSTER_HEALTH_ANALYZER,Value:registry.redhat.io/cluster-observability-operator/cluster-health-analyzer-rhel9@sha256:45a4ec2a519bcec99e886aa91596d5356a2414a2bd103baaef9fa7838c672eb2,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{400 -3} {} 400m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:observability-operator-tls,ReadOnly:true,MountPath:/etc/tls/private,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nwkhs,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000350000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod observability-operator-d8bb48f5d-m6mjp_openshift-operators(e908ab9f-9726-406c-afae-77d716c404e3): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 08 21:31:01 crc kubenswrapper[4791]: E1208 21:31:01.898778 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/observability-operator-d8bb48f5d-m6mjp" podUID="e908ab9f-9726-406c-afae-77d716c404e3" Dec 08 21:31:02 crc kubenswrapper[4791]: E1208 21:31:02.319361 4791 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:ce7d2904f7b238aa37dfe74a0b76bf73629e7a14fa52bf54b0ecf030ca36f1bb\\\"\"" pod="openshift-operators/observability-operator-d8bb48f5d-m6mjp" podUID="e908ab9f-9726-406c-afae-77d716c404e3" Dec 08 21:31:02 crc kubenswrapper[4791]: I1208 21:31:02.519489 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-578fb" Dec 08 21:31:02 crc kubenswrapper[4791]: I1208 21:31:02.570810 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-578fb" Dec 08 21:31:02 crc kubenswrapper[4791]: I1208 21:31:02.667444 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-578fb"] Dec 08 21:31:02 crc kubenswrapper[4791]: I1208 21:31:02.757667 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-sv4g6"] Dec 08 21:31:02 crc kubenswrapper[4791]: I1208 21:31:02.758001 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-sv4g6" podUID="bff6eb2e-73f5-4c5c-8785-dca5aebf0619" containerName="registry-server" containerID="cri-o://095c4c0b9c9231b9bcfe552cbe4b35a0305bb4c7126fb4f2502626feb28492ca" gracePeriod=2 Dec 08 21:31:03 crc kubenswrapper[4791]: I1208 21:31:03.326323 4791 generic.go:334] "Generic (PLEG): container finished" podID="bff6eb2e-73f5-4c5c-8785-dca5aebf0619" containerID="095c4c0b9c9231b9bcfe552cbe4b35a0305bb4c7126fb4f2502626feb28492ca" exitCode=0 Dec 08 21:31:03 crc kubenswrapper[4791]: I1208 21:31:03.326424 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sv4g6" event={"ID":"bff6eb2e-73f5-4c5c-8785-dca5aebf0619","Type":"ContainerDied","Data":"095c4c0b9c9231b9bcfe552cbe4b35a0305bb4c7126fb4f2502626feb28492ca"} Dec 08 21:31:03 crc kubenswrapper[4791]: I1208 21:31:03.696112 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-sv4g6" Dec 08 21:31:03 crc kubenswrapper[4791]: I1208 21:31:03.839940 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l477c\" (UniqueName: \"kubernetes.io/projected/bff6eb2e-73f5-4c5c-8785-dca5aebf0619-kube-api-access-l477c\") pod \"bff6eb2e-73f5-4c5c-8785-dca5aebf0619\" (UID: \"bff6eb2e-73f5-4c5c-8785-dca5aebf0619\") " Dec 08 21:31:03 crc kubenswrapper[4791]: I1208 21:31:03.840040 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bff6eb2e-73f5-4c5c-8785-dca5aebf0619-utilities\") pod \"bff6eb2e-73f5-4c5c-8785-dca5aebf0619\" (UID: \"bff6eb2e-73f5-4c5c-8785-dca5aebf0619\") " Dec 08 21:31:03 crc kubenswrapper[4791]: I1208 21:31:03.840140 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bff6eb2e-73f5-4c5c-8785-dca5aebf0619-catalog-content\") pod \"bff6eb2e-73f5-4c5c-8785-dca5aebf0619\" (UID: \"bff6eb2e-73f5-4c5c-8785-dca5aebf0619\") " Dec 08 21:31:03 crc kubenswrapper[4791]: I1208 21:31:03.840920 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bff6eb2e-73f5-4c5c-8785-dca5aebf0619-utilities" (OuterVolumeSpecName: "utilities") pod "bff6eb2e-73f5-4c5c-8785-dca5aebf0619" (UID: "bff6eb2e-73f5-4c5c-8785-dca5aebf0619"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:31:03 crc kubenswrapper[4791]: I1208 21:31:03.849755 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bff6eb2e-73f5-4c5c-8785-dca5aebf0619-kube-api-access-l477c" (OuterVolumeSpecName: "kube-api-access-l477c") pod "bff6eb2e-73f5-4c5c-8785-dca5aebf0619" (UID: "bff6eb2e-73f5-4c5c-8785-dca5aebf0619"). InnerVolumeSpecName "kube-api-access-l477c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:31:03 crc kubenswrapper[4791]: I1208 21:31:03.941770 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l477c\" (UniqueName: \"kubernetes.io/projected/bff6eb2e-73f5-4c5c-8785-dca5aebf0619-kube-api-access-l477c\") on node \"crc\" DevicePath \"\"" Dec 08 21:31:03 crc kubenswrapper[4791]: I1208 21:31:03.941810 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bff6eb2e-73f5-4c5c-8785-dca5aebf0619-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 21:31:03 crc kubenswrapper[4791]: I1208 21:31:03.963862 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bff6eb2e-73f5-4c5c-8785-dca5aebf0619-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bff6eb2e-73f5-4c5c-8785-dca5aebf0619" (UID: "bff6eb2e-73f5-4c5c-8785-dca5aebf0619"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:31:04 crc kubenswrapper[4791]: I1208 21:31:04.043828 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bff6eb2e-73f5-4c5c-8785-dca5aebf0619-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 21:31:04 crc kubenswrapper[4791]: I1208 21:31:04.336949 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sv4g6" event={"ID":"bff6eb2e-73f5-4c5c-8785-dca5aebf0619","Type":"ContainerDied","Data":"efee163659e83ad5ef2a9eddd1c31973c42aadec3a6a54bcab0f0d84305336b4"} Dec 08 21:31:04 crc kubenswrapper[4791]: I1208 21:31:04.337035 4791 scope.go:117] "RemoveContainer" containerID="095c4c0b9c9231b9bcfe552cbe4b35a0305bb4c7126fb4f2502626feb28492ca" Dec 08 21:31:04 crc kubenswrapper[4791]: I1208 21:31:04.337849 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sv4g6" Dec 08 21:31:04 crc kubenswrapper[4791]: I1208 21:31:04.351053 4791 scope.go:117] "RemoveContainer" containerID="44b02f9e415eaca12b17b10f2343e7aec67b80fad3a3319ba949a92a0fd0865b" Dec 08 21:31:04 crc kubenswrapper[4791]: I1208 21:31:04.367057 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-sv4g6"] Dec 08 21:31:04 crc kubenswrapper[4791]: I1208 21:31:04.371928 4791 scope.go:117] "RemoveContainer" containerID="0fd976ec95eb972d6a82d15eb6deda35563e7e21b774bdec0eebf62b4ab10c7d" Dec 08 21:31:04 crc kubenswrapper[4791]: I1208 21:31:04.372992 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-sv4g6"] Dec 08 21:31:05 crc kubenswrapper[4791]: I1208 21:31:05.251144 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:31:05 crc kubenswrapper[4791]: I1208 21:31:05.251450 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:31:05 crc kubenswrapper[4791]: I1208 21:31:05.345052 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-8ckfk" event={"ID":"5bf6b4bb-0cd5-4461-b351-def18dd64e8c","Type":"ContainerStarted","Data":"1d722df7ae0139a96dd833aa40cbfe6f95e38f8a621e304768c0f948309c0c11"} Dec 08 21:31:05 crc kubenswrapper[4791]: I1208 21:31:05.346735 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-w777c" event={"ID":"df480993-0603-450c-9cec-1e3f5472e67a","Type":"ContainerStarted","Data":"66999ba530d91422d3abaf0c176d8debd1fee01b452a50921df94c19e06e7058"} Dec 08 21:31:05 crc kubenswrapper[4791]: I1208 21:31:05.348743 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-cb4nk" event={"ID":"33a18e1e-100a-4419-9119-7de245332906","Type":"ContainerStarted","Data":"a4b5349ee9ca26d90a57f05edabb8b848a952924ca732088a3fbff8b3195c8a9"} Dec 08 21:31:05 crc kubenswrapper[4791]: I1208 21:31:05.348786 4791 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-5446b9c989-cb4nk" Dec 08 21:31:05 crc kubenswrapper[4791]: I1208 21:31:05.351314 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-5pfgk" event={"ID":"c2830318-9f0e-4406-a86b-0622bd55b65b","Type":"ContainerStarted","Data":"de4210db2cb08b0eaadf2119c51f8455102db4ac56b822f70cc8c7264515a9ea"} Dec 08 21:31:05 crc kubenswrapper[4791]: I1208 21:31:05.380303 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-8ckfk" podStartSLOduration=30.121199995 podStartE2EDuration="44.380276989s" podCreationTimestamp="2025-12-08 21:30:21 +0000 UTC" firstStartedPulling="2025-12-08 21:30:49.220115763 +0000 UTC m=+725.918874108" lastFinishedPulling="2025-12-08 21:31:03.479192757 +0000 UTC m=+740.177951102" observedRunningTime="2025-12-08 21:31:05.376800133 +0000 UTC m=+742.075558488" watchObservedRunningTime="2025-12-08 21:31:05.380276989 +0000 UTC m=+742.079035344" Dec 08 21:31:05 crc kubenswrapper[4791]: I1208 21:31:05.407106 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-5pfgk" podStartSLOduration=26.970589235 podStartE2EDuration="44.40707848s" podCreationTimestamp="2025-12-08 21:30:21 +0000 UTC" firstStartedPulling="2025-12-08 21:30:46.036176021 +0000 UTC m=+722.734934366" lastFinishedPulling="2025-12-08 21:31:03.472665266 +0000 UTC m=+740.171423611" observedRunningTime="2025-12-08 21:31:05.404883906 +0000 UTC m=+742.103642261" watchObservedRunningTime="2025-12-08 21:31:05.40707848 +0000 UTC m=+742.105836825" Dec 08 21:31:05 crc kubenswrapper[4791]: I1208 21:31:05.426442 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-5446b9c989-cb4nk" podStartSLOduration=29.376539273 podStartE2EDuration="44.426425228s" podCreationTimestamp="2025-12-08 21:30:21 +0000 UTC" firstStartedPulling="2025-12-08 21:30:48.445475051 +0000 UTC m=+725.144233406" lastFinishedPulling="2025-12-08 21:31:03.495361016 +0000 UTC m=+740.194119361" observedRunningTime="2025-12-08 21:31:05.424031208 +0000 UTC m=+742.122789563" watchObservedRunningTime="2025-12-08 21:31:05.426425228 +0000 UTC m=+742.125183573" Dec 08 21:31:05 crc kubenswrapper[4791]: I1208 21:31:05.448935 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7cd7857c4f-w777c" podStartSLOduration=29.535135356 podStartE2EDuration="44.448913362s" podCreationTimestamp="2025-12-08 21:30:21 +0000 UTC" firstStartedPulling="2025-12-08 21:30:48.559110415 +0000 UTC m=+725.257868760" lastFinishedPulling="2025-12-08 21:31:03.472888421 +0000 UTC m=+740.171646766" observedRunningTime="2025-12-08 21:31:05.447021686 +0000 UTC m=+742.145780031" watchObservedRunningTime="2025-12-08 21:31:05.448913362 +0000 UTC m=+742.147671697" Dec 08 21:31:05 crc kubenswrapper[4791]: I1208 21:31:05.605984 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bff6eb2e-73f5-4c5c-8785-dca5aebf0619" path="/var/lib/kubelet/pods/bff6eb2e-73f5-4c5c-8785-dca5aebf0619/volumes" Dec 08 21:31:12 crc kubenswrapper[4791]: I1208 21:31:12.300278 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-5446b9c989-cb4nk" Dec 08 21:31:17 crc 
kubenswrapper[4791]: I1208 21:31:17.431196 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-m6mjp" event={"ID":"e908ab9f-9726-406c-afae-77d716c404e3","Type":"ContainerStarted","Data":"a6e31ad78a5c19f9e9fca67b0867b5b17aff1f93e2a0b9cfa95af57d0558bdbe"} Dec 08 21:31:17 crc kubenswrapper[4791]: I1208 21:31:17.431952 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-d8bb48f5d-m6mjp" Dec 08 21:31:17 crc kubenswrapper[4791]: I1208 21:31:17.440882 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-d8bb48f5d-m6mjp" Dec 08 21:31:17 crc kubenswrapper[4791]: I1208 21:31:17.460161 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-d8bb48f5d-m6mjp" podStartSLOduration=24.740826253 podStartE2EDuration="56.460121478s" podCreationTimestamp="2025-12-08 21:30:21 +0000 UTC" firstStartedPulling="2025-12-08 21:30:44.958791769 +0000 UTC m=+721.657550114" lastFinishedPulling="2025-12-08 21:31:16.678086994 +0000 UTC m=+753.376845339" observedRunningTime="2025-12-08 21:31:17.451577517 +0000 UTC m=+754.150335862" watchObservedRunningTime="2025-12-08 21:31:17.460121478 +0000 UTC m=+754.158879863" Dec 08 21:31:21 crc kubenswrapper[4791]: I1208 21:31:21.610501 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-6jvt2"] Dec 08 21:31:21 crc kubenswrapper[4791]: E1208 21:31:21.611215 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bff6eb2e-73f5-4c5c-8785-dca5aebf0619" containerName="extract-content" Dec 08 21:31:21 crc kubenswrapper[4791]: I1208 21:31:21.611233 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="bff6eb2e-73f5-4c5c-8785-dca5aebf0619" containerName="extract-content" Dec 08 21:31:21 crc kubenswrapper[4791]: E1208 21:31:21.611255 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bff6eb2e-73f5-4c5c-8785-dca5aebf0619" containerName="extract-utilities" Dec 08 21:31:21 crc kubenswrapper[4791]: I1208 21:31:21.611261 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="bff6eb2e-73f5-4c5c-8785-dca5aebf0619" containerName="extract-utilities" Dec 08 21:31:21 crc kubenswrapper[4791]: E1208 21:31:21.611275 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bff6eb2e-73f5-4c5c-8785-dca5aebf0619" containerName="registry-server" Dec 08 21:31:21 crc kubenswrapper[4791]: I1208 21:31:21.611282 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="bff6eb2e-73f5-4c5c-8785-dca5aebf0619" containerName="registry-server" Dec 08 21:31:21 crc kubenswrapper[4791]: I1208 21:31:21.611412 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="bff6eb2e-73f5-4c5c-8785-dca5aebf0619" containerName="registry-server" Dec 08 21:31:21 crc kubenswrapper[4791]: I1208 21:31:21.615268 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-6jvt2" Dec 08 21:31:21 crc kubenswrapper[4791]: I1208 21:31:21.628574 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6jvt2"] Dec 08 21:31:21 crc kubenswrapper[4791]: I1208 21:31:21.785864 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9wtnn\" (UniqueName: \"kubernetes.io/projected/f3068029-07b3-4880-bf52-4d3f3ef6f922-kube-api-access-9wtnn\") pod \"certified-operators-6jvt2\" (UID: \"f3068029-07b3-4880-bf52-4d3f3ef6f922\") " pod="openshift-marketplace/certified-operators-6jvt2" Dec 08 21:31:21 crc kubenswrapper[4791]: I1208 21:31:21.785961 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3068029-07b3-4880-bf52-4d3f3ef6f922-catalog-content\") pod \"certified-operators-6jvt2\" (UID: \"f3068029-07b3-4880-bf52-4d3f3ef6f922\") " pod="openshift-marketplace/certified-operators-6jvt2" Dec 08 21:31:21 crc kubenswrapper[4791]: I1208 21:31:21.786018 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3068029-07b3-4880-bf52-4d3f3ef6f922-utilities\") pod \"certified-operators-6jvt2\" (UID: \"f3068029-07b3-4880-bf52-4d3f3ef6f922\") " pod="openshift-marketplace/certified-operators-6jvt2" Dec 08 21:31:21 crc kubenswrapper[4791]: I1208 21:31:21.887340 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3068029-07b3-4880-bf52-4d3f3ef6f922-catalog-content\") pod \"certified-operators-6jvt2\" (UID: \"f3068029-07b3-4880-bf52-4d3f3ef6f922\") " pod="openshift-marketplace/certified-operators-6jvt2" Dec 08 21:31:21 crc kubenswrapper[4791]: I1208 21:31:21.887439 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3068029-07b3-4880-bf52-4d3f3ef6f922-utilities\") pod \"certified-operators-6jvt2\" (UID: \"f3068029-07b3-4880-bf52-4d3f3ef6f922\") " pod="openshift-marketplace/certified-operators-6jvt2" Dec 08 21:31:21 crc kubenswrapper[4791]: I1208 21:31:21.887513 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9wtnn\" (UniqueName: \"kubernetes.io/projected/f3068029-07b3-4880-bf52-4d3f3ef6f922-kube-api-access-9wtnn\") pod \"certified-operators-6jvt2\" (UID: \"f3068029-07b3-4880-bf52-4d3f3ef6f922\") " pod="openshift-marketplace/certified-operators-6jvt2" Dec 08 21:31:21 crc kubenswrapper[4791]: I1208 21:31:21.887970 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3068029-07b3-4880-bf52-4d3f3ef6f922-catalog-content\") pod \"certified-operators-6jvt2\" (UID: \"f3068029-07b3-4880-bf52-4d3f3ef6f922\") " pod="openshift-marketplace/certified-operators-6jvt2" Dec 08 21:31:21 crc kubenswrapper[4791]: I1208 21:31:21.888158 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3068029-07b3-4880-bf52-4d3f3ef6f922-utilities\") pod \"certified-operators-6jvt2\" (UID: \"f3068029-07b3-4880-bf52-4d3f3ef6f922\") " pod="openshift-marketplace/certified-operators-6jvt2" Dec 08 21:31:21 crc kubenswrapper[4791]: I1208 21:31:21.926275 4791 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-9wtnn\" (UniqueName: \"kubernetes.io/projected/f3068029-07b3-4880-bf52-4d3f3ef6f922-kube-api-access-9wtnn\") pod \"certified-operators-6jvt2\" (UID: \"f3068029-07b3-4880-bf52-4d3f3ef6f922\") " pod="openshift-marketplace/certified-operators-6jvt2" Dec 08 21:31:21 crc kubenswrapper[4791]: I1208 21:31:21.943922 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6jvt2" Dec 08 21:31:22 crc kubenswrapper[4791]: I1208 21:31:22.291588 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6jvt2"] Dec 08 21:31:22 crc kubenswrapper[4791]: I1208 21:31:22.596406 4791 generic.go:334] "Generic (PLEG): container finished" podID="f3068029-07b3-4880-bf52-4d3f3ef6f922" containerID="5c63bf5895c96433480db13c0ab2d886bcdf82ba6d8b958ef9d0db9058309dd7" exitCode=0 Dec 08 21:31:22 crc kubenswrapper[4791]: I1208 21:31:22.596450 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6jvt2" event={"ID":"f3068029-07b3-4880-bf52-4d3f3ef6f922","Type":"ContainerDied","Data":"5c63bf5895c96433480db13c0ab2d886bcdf82ba6d8b958ef9d0db9058309dd7"} Dec 08 21:31:22 crc kubenswrapper[4791]: I1208 21:31:22.596476 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6jvt2" event={"ID":"f3068029-07b3-4880-bf52-4d3f3ef6f922","Type":"ContainerStarted","Data":"6a75c757209f8f9b626baccf30b2e0f0826581cb476e216ed3b5272a4be1404b"} Dec 08 21:31:23 crc kubenswrapper[4791]: I1208 21:31:23.606498 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6jvt2" event={"ID":"f3068029-07b3-4880-bf52-4d3f3ef6f922","Type":"ContainerStarted","Data":"1614df5475cf7968d1488391a0c296b83e60c44c42c059f3ae151e7390a8d48f"} Dec 08 21:31:24 crc kubenswrapper[4791]: I1208 21:31:24.613560 4791 generic.go:334] "Generic (PLEG): container finished" podID="f3068029-07b3-4880-bf52-4d3f3ef6f922" containerID="1614df5475cf7968d1488391a0c296b83e60c44c42c059f3ae151e7390a8d48f" exitCode=0 Dec 08 21:31:24 crc kubenswrapper[4791]: I1208 21:31:24.613602 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6jvt2" event={"ID":"f3068029-07b3-4880-bf52-4d3f3ef6f922","Type":"ContainerDied","Data":"1614df5475cf7968d1488391a0c296b83e60c44c42c059f3ae151e7390a8d48f"} Dec 08 21:31:25 crc kubenswrapper[4791]: I1208 21:31:25.621761 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6jvt2" event={"ID":"f3068029-07b3-4880-bf52-4d3f3ef6f922","Type":"ContainerStarted","Data":"230d7e88bf44dc3df9cffb503e09c7587750ce912423178a3db770e315cc232c"} Dec 08 21:31:25 crc kubenswrapper[4791]: I1208 21:31:25.645016 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-6jvt2" podStartSLOduration=2.221676935 podStartE2EDuration="4.644997372s" podCreationTimestamp="2025-12-08 21:31:21 +0000 UTC" firstStartedPulling="2025-12-08 21:31:22.597773642 +0000 UTC m=+759.296531987" lastFinishedPulling="2025-12-08 21:31:25.021094079 +0000 UTC m=+761.719852424" observedRunningTime="2025-12-08 21:31:25.641052515 +0000 UTC m=+762.339810870" watchObservedRunningTime="2025-12-08 21:31:25.644997372 +0000 UTC m=+762.343755717" Dec 08 21:31:27 crc kubenswrapper[4791]: I1208 21:31:27.899856 4791 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["cert-manager/cert-manager-cainjector-7f985d654d-67c78"] Dec 08 21:31:27 crc kubenswrapper[4791]: I1208 21:31:27.901337 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-67c78" Dec 08 21:31:27 crc kubenswrapper[4791]: I1208 21:31:27.906169 4791 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-cvpwn" Dec 08 21:31:27 crc kubenswrapper[4791]: I1208 21:31:27.906490 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Dec 08 21:31:27 crc kubenswrapper[4791]: I1208 21:31:27.906635 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Dec 08 21:31:27 crc kubenswrapper[4791]: I1208 21:31:27.909368 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-67c78"] Dec 08 21:31:27 crc kubenswrapper[4791]: I1208 21:31:27.925606 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-pvml8"] Dec 08 21:31:27 crc kubenswrapper[4791]: I1208 21:31:27.926950 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-pvml8" Dec 08 21:31:27 crc kubenswrapper[4791]: I1208 21:31:27.937160 4791 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-gbbhb" Dec 08 21:31:27 crc kubenswrapper[4791]: I1208 21:31:27.942789 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-sd8bs"] Dec 08 21:31:27 crc kubenswrapper[4791]: I1208 21:31:27.944262 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-sd8bs" Dec 08 21:31:27 crc kubenswrapper[4791]: I1208 21:31:27.947875 4791 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-5gp5p" Dec 08 21:31:27 crc kubenswrapper[4791]: I1208 21:31:27.958170 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2xpf\" (UniqueName: \"kubernetes.io/projected/46ead9b1-5e64-4c26-a35a-6f6fd7884e1d-kube-api-access-d2xpf\") pod \"cert-manager-5b446d88c5-pvml8\" (UID: \"46ead9b1-5e64-4c26-a35a-6f6fd7884e1d\") " pod="cert-manager/cert-manager-5b446d88c5-pvml8" Dec 08 21:31:27 crc kubenswrapper[4791]: I1208 21:31:27.958218 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xtn4j\" (UniqueName: \"kubernetes.io/projected/60c082ea-6911-490a-8989-a16e6a63fac6-kube-api-access-xtn4j\") pod \"cert-manager-webhook-5655c58dd6-sd8bs\" (UID: \"60c082ea-6911-490a-8989-a16e6a63fac6\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-sd8bs" Dec 08 21:31:27 crc kubenswrapper[4791]: I1208 21:31:27.958251 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4kd4n\" (UniqueName: \"kubernetes.io/projected/65d9eb9f-c7d6-4e03-b9b7-061d49ec03af-kube-api-access-4kd4n\") pod \"cert-manager-cainjector-7f985d654d-67c78\" (UID: \"65d9eb9f-c7d6-4e03-b9b7-061d49ec03af\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-67c78" Dec 08 21:31:27 crc kubenswrapper[4791]: I1208 21:31:27.962002 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-pvml8"] Dec 08 21:31:27 crc kubenswrapper[4791]: I1208 21:31:27.977331 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-sd8bs"] Dec 08 21:31:28 crc kubenswrapper[4791]: I1208 21:31:28.059463 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d2xpf\" (UniqueName: \"kubernetes.io/projected/46ead9b1-5e64-4c26-a35a-6f6fd7884e1d-kube-api-access-d2xpf\") pod \"cert-manager-5b446d88c5-pvml8\" (UID: \"46ead9b1-5e64-4c26-a35a-6f6fd7884e1d\") " pod="cert-manager/cert-manager-5b446d88c5-pvml8" Dec 08 21:31:28 crc kubenswrapper[4791]: I1208 21:31:28.059512 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xtn4j\" (UniqueName: \"kubernetes.io/projected/60c082ea-6911-490a-8989-a16e6a63fac6-kube-api-access-xtn4j\") pod \"cert-manager-webhook-5655c58dd6-sd8bs\" (UID: \"60c082ea-6911-490a-8989-a16e6a63fac6\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-sd8bs" Dec 08 21:31:28 crc kubenswrapper[4791]: I1208 21:31:28.059546 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4kd4n\" (UniqueName: \"kubernetes.io/projected/65d9eb9f-c7d6-4e03-b9b7-061d49ec03af-kube-api-access-4kd4n\") pod \"cert-manager-cainjector-7f985d654d-67c78\" (UID: \"65d9eb9f-c7d6-4e03-b9b7-061d49ec03af\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-67c78" Dec 08 21:31:28 crc kubenswrapper[4791]: I1208 21:31:28.085662 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xtn4j\" (UniqueName: \"kubernetes.io/projected/60c082ea-6911-490a-8989-a16e6a63fac6-kube-api-access-xtn4j\") pod \"cert-manager-webhook-5655c58dd6-sd8bs\" (UID: \"60c082ea-6911-490a-8989-a16e6a63fac6\") " 
pod="cert-manager/cert-manager-webhook-5655c58dd6-sd8bs" Dec 08 21:31:28 crc kubenswrapper[4791]: I1208 21:31:28.085908 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d2xpf\" (UniqueName: \"kubernetes.io/projected/46ead9b1-5e64-4c26-a35a-6f6fd7884e1d-kube-api-access-d2xpf\") pod \"cert-manager-5b446d88c5-pvml8\" (UID: \"46ead9b1-5e64-4c26-a35a-6f6fd7884e1d\") " pod="cert-manager/cert-manager-5b446d88c5-pvml8" Dec 08 21:31:28 crc kubenswrapper[4791]: I1208 21:31:28.088639 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4kd4n\" (UniqueName: \"kubernetes.io/projected/65d9eb9f-c7d6-4e03-b9b7-061d49ec03af-kube-api-access-4kd4n\") pod \"cert-manager-cainjector-7f985d654d-67c78\" (UID: \"65d9eb9f-c7d6-4e03-b9b7-061d49ec03af\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-67c78" Dec 08 21:31:28 crc kubenswrapper[4791]: I1208 21:31:28.257023 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-67c78" Dec 08 21:31:28 crc kubenswrapper[4791]: I1208 21:31:28.268299 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-pvml8" Dec 08 21:31:28 crc kubenswrapper[4791]: I1208 21:31:28.278378 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-sd8bs" Dec 08 21:31:28 crc kubenswrapper[4791]: I1208 21:31:28.994108 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-pvml8"] Dec 08 21:31:29 crc kubenswrapper[4791]: W1208 21:31:29.004637 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod46ead9b1_5e64_4c26_a35a_6f6fd7884e1d.slice/crio-e01f1f4d828c4540172375a0dd9040448aaf7fde01af02dab8808af78ed1930f WatchSource:0}: Error finding container e01f1f4d828c4540172375a0dd9040448aaf7fde01af02dab8808af78ed1930f: Status 404 returned error can't find the container with id e01f1f4d828c4540172375a0dd9040448aaf7fde01af02dab8808af78ed1930f Dec 08 21:31:29 crc kubenswrapper[4791]: I1208 21:31:29.033328 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-67c78"] Dec 08 21:31:29 crc kubenswrapper[4791]: W1208 21:31:29.038528 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod65d9eb9f_c7d6_4e03_b9b7_061d49ec03af.slice/crio-a54ec1f0f9844ab510b660d17a89198c97576df50f1ed29fca4a74e750be549d WatchSource:0}: Error finding container a54ec1f0f9844ab510b660d17a89198c97576df50f1ed29fca4a74e750be549d: Status 404 returned error can't find the container with id a54ec1f0f9844ab510b660d17a89198c97576df50f1ed29fca4a74e750be549d Dec 08 21:31:29 crc kubenswrapper[4791]: I1208 21:31:29.055820 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-sd8bs"] Dec 08 21:31:29 crc kubenswrapper[4791]: W1208 21:31:29.065414 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod60c082ea_6911_490a_8989_a16e6a63fac6.slice/crio-e2f41140d2acfc2583232ea0c242c4f1e219a74e80afa2c077e8a82e4087518e WatchSource:0}: Error finding container e2f41140d2acfc2583232ea0c242c4f1e219a74e80afa2c077e8a82e4087518e: Status 404 returned error can't find the container with id 
e2f41140d2acfc2583232ea0c242c4f1e219a74e80afa2c077e8a82e4087518e Dec 08 21:31:29 crc kubenswrapper[4791]: I1208 21:31:29.649995 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-pvml8" event={"ID":"46ead9b1-5e64-4c26-a35a-6f6fd7884e1d","Type":"ContainerStarted","Data":"e01f1f4d828c4540172375a0dd9040448aaf7fde01af02dab8808af78ed1930f"} Dec 08 21:31:29 crc kubenswrapper[4791]: I1208 21:31:29.651384 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-sd8bs" event={"ID":"60c082ea-6911-490a-8989-a16e6a63fac6","Type":"ContainerStarted","Data":"e2f41140d2acfc2583232ea0c242c4f1e219a74e80afa2c077e8a82e4087518e"} Dec 08 21:31:29 crc kubenswrapper[4791]: I1208 21:31:29.652258 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-67c78" event={"ID":"65d9eb9f-c7d6-4e03-b9b7-061d49ec03af","Type":"ContainerStarted","Data":"a54ec1f0f9844ab510b660d17a89198c97576df50f1ed29fca4a74e750be549d"} Dec 08 21:31:31 crc kubenswrapper[4791]: I1208 21:31:31.944750 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-6jvt2" Dec 08 21:31:31 crc kubenswrapper[4791]: I1208 21:31:31.945030 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-6jvt2" Dec 08 21:31:32 crc kubenswrapper[4791]: I1208 21:31:32.001608 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-6jvt2" Dec 08 21:31:32 crc kubenswrapper[4791]: I1208 21:31:32.730133 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-6jvt2" Dec 08 21:31:32 crc kubenswrapper[4791]: I1208 21:31:32.780144 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6jvt2"] Dec 08 21:31:34 crc kubenswrapper[4791]: I1208 21:31:34.685191 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-6jvt2" podUID="f3068029-07b3-4880-bf52-4d3f3ef6f922" containerName="registry-server" containerID="cri-o://230d7e88bf44dc3df9cffb503e09c7587750ce912423178a3db770e315cc232c" gracePeriod=2 Dec 08 21:31:35 crc kubenswrapper[4791]: I1208 21:31:35.252000 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:31:35 crc kubenswrapper[4791]: I1208 21:31:35.252098 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:31:35 crc kubenswrapper[4791]: I1208 21:31:35.711756 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6jvt2" event={"ID":"f3068029-07b3-4880-bf52-4d3f3ef6f922","Type":"ContainerDied","Data":"230d7e88bf44dc3df9cffb503e09c7587750ce912423178a3db770e315cc232c"} Dec 08 21:31:35 crc kubenswrapper[4791]: I1208 21:31:35.711658 4791 generic.go:334] "Generic (PLEG): container finished" 
podID="f3068029-07b3-4880-bf52-4d3f3ef6f922" containerID="230d7e88bf44dc3df9cffb503e09c7587750ce912423178a3db770e315cc232c" exitCode=0 Dec 08 21:31:36 crc kubenswrapper[4791]: I1208 21:31:36.132286 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6jvt2" Dec 08 21:31:36 crc kubenswrapper[4791]: I1208 21:31:36.190866 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9wtnn\" (UniqueName: \"kubernetes.io/projected/f3068029-07b3-4880-bf52-4d3f3ef6f922-kube-api-access-9wtnn\") pod \"f3068029-07b3-4880-bf52-4d3f3ef6f922\" (UID: \"f3068029-07b3-4880-bf52-4d3f3ef6f922\") " Dec 08 21:31:36 crc kubenswrapper[4791]: I1208 21:31:36.191015 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3068029-07b3-4880-bf52-4d3f3ef6f922-utilities\") pod \"f3068029-07b3-4880-bf52-4d3f3ef6f922\" (UID: \"f3068029-07b3-4880-bf52-4d3f3ef6f922\") " Dec 08 21:31:36 crc kubenswrapper[4791]: I1208 21:31:36.191086 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3068029-07b3-4880-bf52-4d3f3ef6f922-catalog-content\") pod \"f3068029-07b3-4880-bf52-4d3f3ef6f922\" (UID: \"f3068029-07b3-4880-bf52-4d3f3ef6f922\") " Dec 08 21:31:36 crc kubenswrapper[4791]: I1208 21:31:36.191740 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f3068029-07b3-4880-bf52-4d3f3ef6f922-utilities" (OuterVolumeSpecName: "utilities") pod "f3068029-07b3-4880-bf52-4d3f3ef6f922" (UID: "f3068029-07b3-4880-bf52-4d3f3ef6f922"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:31:36 crc kubenswrapper[4791]: I1208 21:31:36.195578 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3068029-07b3-4880-bf52-4d3f3ef6f922-kube-api-access-9wtnn" (OuterVolumeSpecName: "kube-api-access-9wtnn") pod "f3068029-07b3-4880-bf52-4d3f3ef6f922" (UID: "f3068029-07b3-4880-bf52-4d3f3ef6f922"). InnerVolumeSpecName "kube-api-access-9wtnn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:31:36 crc kubenswrapper[4791]: I1208 21:31:36.244737 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f3068029-07b3-4880-bf52-4d3f3ef6f922-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f3068029-07b3-4880-bf52-4d3f3ef6f922" (UID: "f3068029-07b3-4880-bf52-4d3f3ef6f922"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:31:36 crc kubenswrapper[4791]: I1208 21:31:36.294755 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3068029-07b3-4880-bf52-4d3f3ef6f922-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 21:31:36 crc kubenswrapper[4791]: I1208 21:31:36.294946 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3068029-07b3-4880-bf52-4d3f3ef6f922-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 21:31:36 crc kubenswrapper[4791]: I1208 21:31:36.295005 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9wtnn\" (UniqueName: \"kubernetes.io/projected/f3068029-07b3-4880-bf52-4d3f3ef6f922-kube-api-access-9wtnn\") on node \"crc\" DevicePath \"\"" Dec 08 21:31:36 crc kubenswrapper[4791]: I1208 21:31:36.721985 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6jvt2" event={"ID":"f3068029-07b3-4880-bf52-4d3f3ef6f922","Type":"ContainerDied","Data":"6a75c757209f8f9b626baccf30b2e0f0826581cb476e216ed3b5272a4be1404b"} Dec 08 21:31:36 crc kubenswrapper[4791]: I1208 21:31:36.722396 4791 scope.go:117] "RemoveContainer" containerID="230d7e88bf44dc3df9cffb503e09c7587750ce912423178a3db770e315cc232c" Dec 08 21:31:36 crc kubenswrapper[4791]: I1208 21:31:36.722032 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6jvt2" Dec 08 21:31:36 crc kubenswrapper[4791]: I1208 21:31:36.734807 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-pvml8" event={"ID":"46ead9b1-5e64-4c26-a35a-6f6fd7884e1d","Type":"ContainerStarted","Data":"4d9ef6c325cc7dd6de1686b9656336d4a2d8be0b306926f8bcb6e2b9941288f5"} Dec 08 21:31:36 crc kubenswrapper[4791]: I1208 21:31:36.737337 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-sd8bs" event={"ID":"60c082ea-6911-490a-8989-a16e6a63fac6","Type":"ContainerStarted","Data":"ac848e9f492b0595128748f1b9373d9651a347385d3da3a60c109548a5b7c475"} Dec 08 21:31:36 crc kubenswrapper[4791]: I1208 21:31:36.737968 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-sd8bs" Dec 08 21:31:36 crc kubenswrapper[4791]: I1208 21:31:36.739654 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-67c78" event={"ID":"65d9eb9f-c7d6-4e03-b9b7-061d49ec03af","Type":"ContainerStarted","Data":"2ab0eba8d602d2dcb9d1f112d001b55ddfb3f98bd2d7e0d37a79b69b1d28640d"} Dec 08 21:31:36 crc kubenswrapper[4791]: I1208 21:31:36.750400 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6jvt2"] Dec 08 21:31:36 crc kubenswrapper[4791]: I1208 21:31:36.753491 4791 scope.go:117] "RemoveContainer" containerID="1614df5475cf7968d1488391a0c296b83e60c44c42c059f3ae151e7390a8d48f" Dec 08 21:31:36 crc kubenswrapper[4791]: I1208 21:31:36.761261 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-6jvt2"] Dec 08 21:31:36 crc kubenswrapper[4791]: I1208 21:31:36.774085 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-pvml8" podStartSLOduration=2.631902305 podStartE2EDuration="9.774068653s" podCreationTimestamp="2025-12-08 21:31:27 +0000 
UTC" firstStartedPulling="2025-12-08 21:31:29.009861748 +0000 UTC m=+765.708620103" lastFinishedPulling="2025-12-08 21:31:36.152028096 +0000 UTC m=+772.850786451" observedRunningTime="2025-12-08 21:31:36.768315201 +0000 UTC m=+773.467073546" watchObservedRunningTime="2025-12-08 21:31:36.774068653 +0000 UTC m=+773.472826998" Dec 08 21:31:36 crc kubenswrapper[4791]: I1208 21:31:36.823899 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-sd8bs" podStartSLOduration=2.768613888 podStartE2EDuration="9.823871002s" podCreationTimestamp="2025-12-08 21:31:27 +0000 UTC" firstStartedPulling="2025-12-08 21:31:29.067691425 +0000 UTC m=+765.766449770" lastFinishedPulling="2025-12-08 21:31:36.122948539 +0000 UTC m=+772.821706884" observedRunningTime="2025-12-08 21:31:36.820862218 +0000 UTC m=+773.519620573" watchObservedRunningTime="2025-12-08 21:31:36.823871002 +0000 UTC m=+773.522629347" Dec 08 21:31:36 crc kubenswrapper[4791]: I1208 21:31:36.824744 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-67c78" podStartSLOduration=2.746331487 podStartE2EDuration="9.824735383s" podCreationTimestamp="2025-12-08 21:31:27 +0000 UTC" firstStartedPulling="2025-12-08 21:31:29.04117021 +0000 UTC m=+765.739928555" lastFinishedPulling="2025-12-08 21:31:36.119574106 +0000 UTC m=+772.818332451" observedRunningTime="2025-12-08 21:31:36.807889328 +0000 UTC m=+773.506647673" watchObservedRunningTime="2025-12-08 21:31:36.824735383 +0000 UTC m=+773.523493728" Dec 08 21:31:36 crc kubenswrapper[4791]: I1208 21:31:36.901286 4791 scope.go:117] "RemoveContainer" containerID="5c63bf5895c96433480db13c0ab2d886bcdf82ba6d8b958ef9d0db9058309dd7" Dec 08 21:31:37 crc kubenswrapper[4791]: I1208 21:31:37.608285 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f3068029-07b3-4880-bf52-4d3f3ef6f922" path="/var/lib/kubelet/pods/f3068029-07b3-4880-bf52-4d3f3ef6f922/volumes" Dec 08 21:31:43 crc kubenswrapper[4791]: I1208 21:31:43.280818 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-sd8bs" Dec 08 21:32:01 crc kubenswrapper[4791]: I1208 21:32:01.293320 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-l6zn4"] Dec 08 21:32:01 crc kubenswrapper[4791]: E1208 21:32:01.294557 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3068029-07b3-4880-bf52-4d3f3ef6f922" containerName="extract-utilities" Dec 08 21:32:01 crc kubenswrapper[4791]: I1208 21:32:01.294574 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3068029-07b3-4880-bf52-4d3f3ef6f922" containerName="extract-utilities" Dec 08 21:32:01 crc kubenswrapper[4791]: E1208 21:32:01.294590 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3068029-07b3-4880-bf52-4d3f3ef6f922" containerName="registry-server" Dec 08 21:32:01 crc kubenswrapper[4791]: I1208 21:32:01.294597 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3068029-07b3-4880-bf52-4d3f3ef6f922" containerName="registry-server" Dec 08 21:32:01 crc kubenswrapper[4791]: E1208 21:32:01.294623 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3068029-07b3-4880-bf52-4d3f3ef6f922" containerName="extract-content" Dec 08 21:32:01 crc kubenswrapper[4791]: I1208 21:32:01.294634 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3068029-07b3-4880-bf52-4d3f3ef6f922" 
containerName="extract-content" Dec 08 21:32:01 crc kubenswrapper[4791]: I1208 21:32:01.294822 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3068029-07b3-4880-bf52-4d3f3ef6f922" containerName="registry-server" Dec 08 21:32:01 crc kubenswrapper[4791]: I1208 21:32:01.296499 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-l6zn4" Dec 08 21:32:01 crc kubenswrapper[4791]: I1208 21:32:01.313332 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-l6zn4"] Dec 08 21:32:01 crc kubenswrapper[4791]: I1208 21:32:01.479613 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9tm5w\" (UniqueName: \"kubernetes.io/projected/d96ce78a-9e9b-4a5a-b536-3904872262e6-kube-api-access-9tm5w\") pod \"community-operators-l6zn4\" (UID: \"d96ce78a-9e9b-4a5a-b536-3904872262e6\") " pod="openshift-marketplace/community-operators-l6zn4" Dec 08 21:32:01 crc kubenswrapper[4791]: I1208 21:32:01.480027 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d96ce78a-9e9b-4a5a-b536-3904872262e6-catalog-content\") pod \"community-operators-l6zn4\" (UID: \"d96ce78a-9e9b-4a5a-b536-3904872262e6\") " pod="openshift-marketplace/community-operators-l6zn4" Dec 08 21:32:01 crc kubenswrapper[4791]: I1208 21:32:01.480100 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d96ce78a-9e9b-4a5a-b536-3904872262e6-utilities\") pod \"community-operators-l6zn4\" (UID: \"d96ce78a-9e9b-4a5a-b536-3904872262e6\") " pod="openshift-marketplace/community-operators-l6zn4" Dec 08 21:32:01 crc kubenswrapper[4791]: I1208 21:32:01.581170 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d96ce78a-9e9b-4a5a-b536-3904872262e6-utilities\") pod \"community-operators-l6zn4\" (UID: \"d96ce78a-9e9b-4a5a-b536-3904872262e6\") " pod="openshift-marketplace/community-operators-l6zn4" Dec 08 21:32:01 crc kubenswrapper[4791]: I1208 21:32:01.581291 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9tm5w\" (UniqueName: \"kubernetes.io/projected/d96ce78a-9e9b-4a5a-b536-3904872262e6-kube-api-access-9tm5w\") pod \"community-operators-l6zn4\" (UID: \"d96ce78a-9e9b-4a5a-b536-3904872262e6\") " pod="openshift-marketplace/community-operators-l6zn4" Dec 08 21:32:01 crc kubenswrapper[4791]: I1208 21:32:01.581332 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d96ce78a-9e9b-4a5a-b536-3904872262e6-catalog-content\") pod \"community-operators-l6zn4\" (UID: \"d96ce78a-9e9b-4a5a-b536-3904872262e6\") " pod="openshift-marketplace/community-operators-l6zn4" Dec 08 21:32:01 crc kubenswrapper[4791]: I1208 21:32:01.581852 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d96ce78a-9e9b-4a5a-b536-3904872262e6-catalog-content\") pod \"community-operators-l6zn4\" (UID: \"d96ce78a-9e9b-4a5a-b536-3904872262e6\") " pod="openshift-marketplace/community-operators-l6zn4" Dec 08 21:32:01 crc kubenswrapper[4791]: I1208 21:32:01.581893 4791 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d96ce78a-9e9b-4a5a-b536-3904872262e6-utilities\") pod \"community-operators-l6zn4\" (UID: \"d96ce78a-9e9b-4a5a-b536-3904872262e6\") " pod="openshift-marketplace/community-operators-l6zn4" Dec 08 21:32:01 crc kubenswrapper[4791]: I1208 21:32:01.602673 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9tm5w\" (UniqueName: \"kubernetes.io/projected/d96ce78a-9e9b-4a5a-b536-3904872262e6-kube-api-access-9tm5w\") pod \"community-operators-l6zn4\" (UID: \"d96ce78a-9e9b-4a5a-b536-3904872262e6\") " pod="openshift-marketplace/community-operators-l6zn4" Dec 08 21:32:01 crc kubenswrapper[4791]: I1208 21:32:01.617519 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-l6zn4" Dec 08 21:32:01 crc kubenswrapper[4791]: I1208 21:32:01.949499 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-l6zn4"] Dec 08 21:32:02 crc kubenswrapper[4791]: I1208 21:32:02.936584 4791 generic.go:334] "Generic (PLEG): container finished" podID="d96ce78a-9e9b-4a5a-b536-3904872262e6" containerID="4b09d8b6bf761eacb7822b7cc2f854821c342509c15882acfb5839010710a454" exitCode=0 Dec 08 21:32:02 crc kubenswrapper[4791]: I1208 21:32:02.936646 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l6zn4" event={"ID":"d96ce78a-9e9b-4a5a-b536-3904872262e6","Type":"ContainerDied","Data":"4b09d8b6bf761eacb7822b7cc2f854821c342509c15882acfb5839010710a454"} Dec 08 21:32:02 crc kubenswrapper[4791]: I1208 21:32:02.936679 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l6zn4" event={"ID":"d96ce78a-9e9b-4a5a-b536-3904872262e6","Type":"ContainerStarted","Data":"b9e6853e11e4a36c5ce1cdbd1febc4f3d9cf23071d197794234fbdf751a2cf0f"} Dec 08 21:32:05 crc kubenswrapper[4791]: I1208 21:32:05.251615 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:32:05 crc kubenswrapper[4791]: I1208 21:32:05.252161 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:32:05 crc kubenswrapper[4791]: I1208 21:32:05.252219 4791 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" Dec 08 21:32:05 crc kubenswrapper[4791]: I1208 21:32:05.252893 4791 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"905d580dece9c9ce3b0d4703b0d2f998c3f8e981a36384e1aaaf1f0e9b3109d3"} pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 08 21:32:05 crc kubenswrapper[4791]: I1208 21:32:05.252961 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" 
podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" containerID="cri-o://905d580dece9c9ce3b0d4703b0d2f998c3f8e981a36384e1aaaf1f0e9b3109d3" gracePeriod=600 Dec 08 21:32:05 crc kubenswrapper[4791]: I1208 21:32:05.958477 4791 generic.go:334] "Generic (PLEG): container finished" podID="d96ce78a-9e9b-4a5a-b536-3904872262e6" containerID="5da1cacd53a8f38dcb34edc00b372b437c03ccf5e3ce1774eff8b85370536599" exitCode=0 Dec 08 21:32:05 crc kubenswrapper[4791]: I1208 21:32:05.958745 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l6zn4" event={"ID":"d96ce78a-9e9b-4a5a-b536-3904872262e6","Type":"ContainerDied","Data":"5da1cacd53a8f38dcb34edc00b372b437c03ccf5e3ce1774eff8b85370536599"} Dec 08 21:32:05 crc kubenswrapper[4791]: I1208 21:32:05.962179 4791 generic.go:334] "Generic (PLEG): container finished" podID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerID="905d580dece9c9ce3b0d4703b0d2f998c3f8e981a36384e1aaaf1f0e9b3109d3" exitCode=0 Dec 08 21:32:05 crc kubenswrapper[4791]: I1208 21:32:05.962242 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" event={"ID":"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d","Type":"ContainerDied","Data":"905d580dece9c9ce3b0d4703b0d2f998c3f8e981a36384e1aaaf1f0e9b3109d3"} Dec 08 21:32:05 crc kubenswrapper[4791]: I1208 21:32:05.962407 4791 scope.go:117] "RemoveContainer" containerID="839f3fac6b8b61fb2e046952212eab2b1c2a06973dd6faea1a1740a19a5794d2" Dec 08 21:32:06 crc kubenswrapper[4791]: I1208 21:32:06.970828 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" event={"ID":"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d","Type":"ContainerStarted","Data":"11eb0a686e8878342818f508b449514d71e01b400661915caf0121820b7c92ca"} Dec 08 21:32:07 crc kubenswrapper[4791]: I1208 21:32:07.981611 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l6zn4" event={"ID":"d96ce78a-9e9b-4a5a-b536-3904872262e6","Type":"ContainerStarted","Data":"49ee3099ff6f3c936d1fdd760f4ae3d25996b0bd66424d97318d21d6d9b7d053"} Dec 08 21:32:08 crc kubenswrapper[4791]: I1208 21:32:08.006408 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-l6zn4" podStartSLOduration=2.56471398 podStartE2EDuration="7.006388074s" podCreationTimestamp="2025-12-08 21:32:01 +0000 UTC" firstStartedPulling="2025-12-08 21:32:02.938577132 +0000 UTC m=+799.637335477" lastFinishedPulling="2025-12-08 21:32:07.380251226 +0000 UTC m=+804.079009571" observedRunningTime="2025-12-08 21:32:08.0017697 +0000 UTC m=+804.700528055" watchObservedRunningTime="2025-12-08 21:32:08.006388074 +0000 UTC m=+804.705146419" Dec 08 21:32:11 crc kubenswrapper[4791]: I1208 21:32:11.618450 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-l6zn4" Dec 08 21:32:11 crc kubenswrapper[4791]: I1208 21:32:11.619041 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-l6zn4" Dec 08 21:32:11 crc kubenswrapper[4791]: I1208 21:32:11.670689 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-l6zn4" Dec 08 21:32:12 crc kubenswrapper[4791]: I1208 21:32:12.094308 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/community-operators-l6zn4" Dec 08 21:32:12 crc kubenswrapper[4791]: I1208 21:32:12.149829 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-l6zn4"] Dec 08 21:32:14 crc kubenswrapper[4791]: I1208 21:32:14.039253 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-l6zn4" podUID="d96ce78a-9e9b-4a5a-b536-3904872262e6" containerName="registry-server" containerID="cri-o://49ee3099ff6f3c936d1fdd760f4ae3d25996b0bd66424d97318d21d6d9b7d053" gracePeriod=2 Dec 08 21:32:15 crc kubenswrapper[4791]: I1208 21:32:15.806901 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8xpwb5"] Dec 08 21:32:15 crc kubenswrapper[4791]: I1208 21:32:15.808567 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8xpwb5" Dec 08 21:32:15 crc kubenswrapper[4791]: I1208 21:32:15.810495 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 08 21:32:15 crc kubenswrapper[4791]: I1208 21:32:15.816357 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8xpwb5"] Dec 08 21:32:15 crc kubenswrapper[4791]: I1208 21:32:15.908895 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a292e97f-d0e0-47ea-8f22-77915dd393f3-util\") pod \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8xpwb5\" (UID: \"a292e97f-d0e0-47ea-8f22-77915dd393f3\") " pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8xpwb5" Dec 08 21:32:15 crc kubenswrapper[4791]: I1208 21:32:15.909022 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wk7sf\" (UniqueName: \"kubernetes.io/projected/a292e97f-d0e0-47ea-8f22-77915dd393f3-kube-api-access-wk7sf\") pod \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8xpwb5\" (UID: \"a292e97f-d0e0-47ea-8f22-77915dd393f3\") " pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8xpwb5" Dec 08 21:32:15 crc kubenswrapper[4791]: I1208 21:32:15.909132 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a292e97f-d0e0-47ea-8f22-77915dd393f3-bundle\") pod \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8xpwb5\" (UID: \"a292e97f-d0e0-47ea-8f22-77915dd393f3\") " pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8xpwb5" Dec 08 21:32:16 crc kubenswrapper[4791]: I1208 21:32:16.010205 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a292e97f-d0e0-47ea-8f22-77915dd393f3-bundle\") pod \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8xpwb5\" (UID: \"a292e97f-d0e0-47ea-8f22-77915dd393f3\") " pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8xpwb5" Dec 08 21:32:16 crc kubenswrapper[4791]: I1208 21:32:16.010294 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/a292e97f-d0e0-47ea-8f22-77915dd393f3-util\") pod \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8xpwb5\" (UID: \"a292e97f-d0e0-47ea-8f22-77915dd393f3\") " pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8xpwb5" Dec 08 21:32:16 crc kubenswrapper[4791]: I1208 21:32:16.010329 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wk7sf\" (UniqueName: \"kubernetes.io/projected/a292e97f-d0e0-47ea-8f22-77915dd393f3-kube-api-access-wk7sf\") pod \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8xpwb5\" (UID: \"a292e97f-d0e0-47ea-8f22-77915dd393f3\") " pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8xpwb5" Dec 08 21:32:16 crc kubenswrapper[4791]: I1208 21:32:16.011201 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a292e97f-d0e0-47ea-8f22-77915dd393f3-util\") pod \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8xpwb5\" (UID: \"a292e97f-d0e0-47ea-8f22-77915dd393f3\") " pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8xpwb5" Dec 08 21:32:16 crc kubenswrapper[4791]: I1208 21:32:16.011520 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a292e97f-d0e0-47ea-8f22-77915dd393f3-bundle\") pod \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8xpwb5\" (UID: \"a292e97f-d0e0-47ea-8f22-77915dd393f3\") " pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8xpwb5" Dec 08 21:32:16 crc kubenswrapper[4791]: I1208 21:32:16.039251 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wk7sf\" (UniqueName: \"kubernetes.io/projected/a292e97f-d0e0-47ea-8f22-77915dd393f3-kube-api-access-wk7sf\") pod \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8xpwb5\" (UID: \"a292e97f-d0e0-47ea-8f22-77915dd393f3\") " pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8xpwb5" Dec 08 21:32:16 crc kubenswrapper[4791]: I1208 21:32:16.099326 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f5zqd6"] Dec 08 21:32:16 crc kubenswrapper[4791]: I1208 21:32:16.100613 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f5zqd6" Dec 08 21:32:16 crc kubenswrapper[4791]: I1208 21:32:16.121138 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f5zqd6"] Dec 08 21:32:16 crc kubenswrapper[4791]: I1208 21:32:16.126104 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8xpwb5" Dec 08 21:32:16 crc kubenswrapper[4791]: I1208 21:32:16.215819 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0bb70a12-b198-4377-906b-8036ff49d91c-util\") pod \"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f5zqd6\" (UID: \"0bb70a12-b198-4377-906b-8036ff49d91c\") " pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f5zqd6" Dec 08 21:32:16 crc kubenswrapper[4791]: I1208 21:32:16.215901 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hf9qk\" (UniqueName: \"kubernetes.io/projected/0bb70a12-b198-4377-906b-8036ff49d91c-kube-api-access-hf9qk\") pod \"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f5zqd6\" (UID: \"0bb70a12-b198-4377-906b-8036ff49d91c\") " pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f5zqd6" Dec 08 21:32:16 crc kubenswrapper[4791]: I1208 21:32:16.215943 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0bb70a12-b198-4377-906b-8036ff49d91c-bundle\") pod \"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f5zqd6\" (UID: \"0bb70a12-b198-4377-906b-8036ff49d91c\") " pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f5zqd6" Dec 08 21:32:16 crc kubenswrapper[4791]: I1208 21:32:16.316966 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hf9qk\" (UniqueName: \"kubernetes.io/projected/0bb70a12-b198-4377-906b-8036ff49d91c-kube-api-access-hf9qk\") pod \"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f5zqd6\" (UID: \"0bb70a12-b198-4377-906b-8036ff49d91c\") " pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f5zqd6" Dec 08 21:32:16 crc kubenswrapper[4791]: I1208 21:32:16.317054 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0bb70a12-b198-4377-906b-8036ff49d91c-bundle\") pod \"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f5zqd6\" (UID: \"0bb70a12-b198-4377-906b-8036ff49d91c\") " pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f5zqd6" Dec 08 21:32:16 crc kubenswrapper[4791]: I1208 21:32:16.317148 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0bb70a12-b198-4377-906b-8036ff49d91c-util\") pod \"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f5zqd6\" (UID: \"0bb70a12-b198-4377-906b-8036ff49d91c\") " pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f5zqd6" Dec 08 21:32:16 crc kubenswrapper[4791]: I1208 21:32:16.317830 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0bb70a12-b198-4377-906b-8036ff49d91c-util\") pod \"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f5zqd6\" (UID: \"0bb70a12-b198-4377-906b-8036ff49d91c\") " pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f5zqd6" Dec 08 21:32:16 crc kubenswrapper[4791]: I1208 21:32:16.318144 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" 
(UniqueName: \"kubernetes.io/empty-dir/0bb70a12-b198-4377-906b-8036ff49d91c-bundle\") pod \"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f5zqd6\" (UID: \"0bb70a12-b198-4377-906b-8036ff49d91c\") " pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f5zqd6" Dec 08 21:32:16 crc kubenswrapper[4791]: I1208 21:32:16.339654 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hf9qk\" (UniqueName: \"kubernetes.io/projected/0bb70a12-b198-4377-906b-8036ff49d91c-kube-api-access-hf9qk\") pod \"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f5zqd6\" (UID: \"0bb70a12-b198-4377-906b-8036ff49d91c\") " pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f5zqd6" Dec 08 21:32:16 crc kubenswrapper[4791]: I1208 21:32:16.383326 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8xpwb5"] Dec 08 21:32:16 crc kubenswrapper[4791]: I1208 21:32:16.416762 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f5zqd6" Dec 08 21:32:16 crc kubenswrapper[4791]: I1208 21:32:16.825666 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f5zqd6"] Dec 08 21:32:16 crc kubenswrapper[4791]: W1208 21:32:16.831679 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0bb70a12_b198_4377_906b_8036ff49d91c.slice/crio-6b98d8b2dbe116144036010669cd5a50743bb9ed33d49fd174802dff49e79b7c WatchSource:0}: Error finding container 6b98d8b2dbe116144036010669cd5a50743bb9ed33d49fd174802dff49e79b7c: Status 404 returned error can't find the container with id 6b98d8b2dbe116144036010669cd5a50743bb9ed33d49fd174802dff49e79b7c Dec 08 21:32:17 crc kubenswrapper[4791]: I1208 21:32:17.063273 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8xpwb5" event={"ID":"a292e97f-d0e0-47ea-8f22-77915dd393f3","Type":"ContainerStarted","Data":"138f328c27a81ea587f31822218353e2e15578391858ad13aeb273231a71dee2"} Dec 08 21:32:17 crc kubenswrapper[4791]: I1208 21:32:17.065853 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f5zqd6" event={"ID":"0bb70a12-b198-4377-906b-8036ff49d91c","Type":"ContainerStarted","Data":"6b98d8b2dbe116144036010669cd5a50743bb9ed33d49fd174802dff49e79b7c"} Dec 08 21:32:18 crc kubenswrapper[4791]: I1208 21:32:18.098296 4791 generic.go:334] "Generic (PLEG): container finished" podID="d96ce78a-9e9b-4a5a-b536-3904872262e6" containerID="49ee3099ff6f3c936d1fdd760f4ae3d25996b0bd66424d97318d21d6d9b7d053" exitCode=0 Dec 08 21:32:18 crc kubenswrapper[4791]: I1208 21:32:18.098385 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l6zn4" event={"ID":"d96ce78a-9e9b-4a5a-b536-3904872262e6","Type":"ContainerDied","Data":"49ee3099ff6f3c936d1fdd760f4ae3d25996b0bd66424d97318d21d6d9b7d053"} Dec 08 21:32:18 crc kubenswrapper[4791]: I1208 21:32:18.115354 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8xpwb5" 
event={"ID":"a292e97f-d0e0-47ea-8f22-77915dd393f3","Type":"ContainerStarted","Data":"f2b708e05e1e560a5b40a67a22a4c6766b285163697478349997185e7d336c9a"} Dec 08 21:32:18 crc kubenswrapper[4791]: I1208 21:32:18.130188 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f5zqd6" event={"ID":"0bb70a12-b198-4377-906b-8036ff49d91c","Type":"ContainerStarted","Data":"ac49a0863eb82837c0703afc28f8c55efb3af9415a87b68e9ab9f91df75b5590"} Dec 08 21:32:18 crc kubenswrapper[4791]: I1208 21:32:18.893781 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-l6zn4" Dec 08 21:32:18 crc kubenswrapper[4791]: I1208 21:32:18.966618 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9tm5w\" (UniqueName: \"kubernetes.io/projected/d96ce78a-9e9b-4a5a-b536-3904872262e6-kube-api-access-9tm5w\") pod \"d96ce78a-9e9b-4a5a-b536-3904872262e6\" (UID: \"d96ce78a-9e9b-4a5a-b536-3904872262e6\") " Dec 08 21:32:18 crc kubenswrapper[4791]: I1208 21:32:18.966874 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d96ce78a-9e9b-4a5a-b536-3904872262e6-utilities\") pod \"d96ce78a-9e9b-4a5a-b536-3904872262e6\" (UID: \"d96ce78a-9e9b-4a5a-b536-3904872262e6\") " Dec 08 21:32:18 crc kubenswrapper[4791]: I1208 21:32:18.966908 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d96ce78a-9e9b-4a5a-b536-3904872262e6-catalog-content\") pod \"d96ce78a-9e9b-4a5a-b536-3904872262e6\" (UID: \"d96ce78a-9e9b-4a5a-b536-3904872262e6\") " Dec 08 21:32:18 crc kubenswrapper[4791]: I1208 21:32:18.968168 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d96ce78a-9e9b-4a5a-b536-3904872262e6-utilities" (OuterVolumeSpecName: "utilities") pod "d96ce78a-9e9b-4a5a-b536-3904872262e6" (UID: "d96ce78a-9e9b-4a5a-b536-3904872262e6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:32:18 crc kubenswrapper[4791]: I1208 21:32:18.975909 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d96ce78a-9e9b-4a5a-b536-3904872262e6-kube-api-access-9tm5w" (OuterVolumeSpecName: "kube-api-access-9tm5w") pod "d96ce78a-9e9b-4a5a-b536-3904872262e6" (UID: "d96ce78a-9e9b-4a5a-b536-3904872262e6"). InnerVolumeSpecName "kube-api-access-9tm5w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:32:19 crc kubenswrapper[4791]: I1208 21:32:19.021750 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d96ce78a-9e9b-4a5a-b536-3904872262e6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d96ce78a-9e9b-4a5a-b536-3904872262e6" (UID: "d96ce78a-9e9b-4a5a-b536-3904872262e6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:32:19 crc kubenswrapper[4791]: I1208 21:32:19.068968 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d96ce78a-9e9b-4a5a-b536-3904872262e6-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 21:32:19 crc kubenswrapper[4791]: I1208 21:32:19.069010 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d96ce78a-9e9b-4a5a-b536-3904872262e6-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 21:32:19 crc kubenswrapper[4791]: I1208 21:32:19.069021 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9tm5w\" (UniqueName: \"kubernetes.io/projected/d96ce78a-9e9b-4a5a-b536-3904872262e6-kube-api-access-9tm5w\") on node \"crc\" DevicePath \"\"" Dec 08 21:32:19 crc kubenswrapper[4791]: I1208 21:32:19.140032 4791 generic.go:334] "Generic (PLEG): container finished" podID="0bb70a12-b198-4377-906b-8036ff49d91c" containerID="ac49a0863eb82837c0703afc28f8c55efb3af9415a87b68e9ab9f91df75b5590" exitCode=0 Dec 08 21:32:19 crc kubenswrapper[4791]: I1208 21:32:19.140994 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f5zqd6" event={"ID":"0bb70a12-b198-4377-906b-8036ff49d91c","Type":"ContainerDied","Data":"ac49a0863eb82837c0703afc28f8c55efb3af9415a87b68e9ab9f91df75b5590"} Dec 08 21:32:19 crc kubenswrapper[4791]: I1208 21:32:19.145284 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l6zn4" event={"ID":"d96ce78a-9e9b-4a5a-b536-3904872262e6","Type":"ContainerDied","Data":"b9e6853e11e4a36c5ce1cdbd1febc4f3d9cf23071d197794234fbdf751a2cf0f"} Dec 08 21:32:19 crc kubenswrapper[4791]: I1208 21:32:19.145369 4791 scope.go:117] "RemoveContainer" containerID="49ee3099ff6f3c936d1fdd760f4ae3d25996b0bd66424d97318d21d6d9b7d053" Dec 08 21:32:19 crc kubenswrapper[4791]: I1208 21:32:19.145499 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-l6zn4" Dec 08 21:32:19 crc kubenswrapper[4791]: I1208 21:32:19.147038 4791 generic.go:334] "Generic (PLEG): container finished" podID="a292e97f-d0e0-47ea-8f22-77915dd393f3" containerID="f2b708e05e1e560a5b40a67a22a4c6766b285163697478349997185e7d336c9a" exitCode=0 Dec 08 21:32:19 crc kubenswrapper[4791]: I1208 21:32:19.147087 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8xpwb5" event={"ID":"a292e97f-d0e0-47ea-8f22-77915dd393f3","Type":"ContainerDied","Data":"f2b708e05e1e560a5b40a67a22a4c6766b285163697478349997185e7d336c9a"} Dec 08 21:32:19 crc kubenswrapper[4791]: I1208 21:32:19.163783 4791 scope.go:117] "RemoveContainer" containerID="5da1cacd53a8f38dcb34edc00b372b437c03ccf5e3ce1774eff8b85370536599" Dec 08 21:32:19 crc kubenswrapper[4791]: I1208 21:32:19.193339 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-l6zn4"] Dec 08 21:32:19 crc kubenswrapper[4791]: I1208 21:32:19.198488 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-l6zn4"] Dec 08 21:32:19 crc kubenswrapper[4791]: I1208 21:32:19.218596 4791 scope.go:117] "RemoveContainer" containerID="4b09d8b6bf761eacb7822b7cc2f854821c342509c15882acfb5839010710a454" Dec 08 21:32:19 crc kubenswrapper[4791]: I1208 21:32:19.609056 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d96ce78a-9e9b-4a5a-b536-3904872262e6" path="/var/lib/kubelet/pods/d96ce78a-9e9b-4a5a-b536-3904872262e6/volumes" Dec 08 21:32:22 crc kubenswrapper[4791]: I1208 21:32:22.170366 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8xpwb5" event={"ID":"a292e97f-d0e0-47ea-8f22-77915dd393f3","Type":"ContainerStarted","Data":"8e0f2a089b3e92bf7c18bbb464bb3dd55e9adb9c8f9aa7149860e48f0af1c669"} Dec 08 21:32:23 crc kubenswrapper[4791]: I1208 21:32:23.178443 4791 generic.go:334] "Generic (PLEG): container finished" podID="a292e97f-d0e0-47ea-8f22-77915dd393f3" containerID="8e0f2a089b3e92bf7c18bbb464bb3dd55e9adb9c8f9aa7149860e48f0af1c669" exitCode=0 Dec 08 21:32:23 crc kubenswrapper[4791]: I1208 21:32:23.178549 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8xpwb5" event={"ID":"a292e97f-d0e0-47ea-8f22-77915dd393f3","Type":"ContainerDied","Data":"8e0f2a089b3e92bf7c18bbb464bb3dd55e9adb9c8f9aa7149860e48f0af1c669"} Dec 08 21:32:24 crc kubenswrapper[4791]: I1208 21:32:24.192419 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8xpwb5" event={"ID":"a292e97f-d0e0-47ea-8f22-77915dd393f3","Type":"ContainerStarted","Data":"164238efc5641961158ae8472d26f94112bfe86d04631906aa1c8919a67ab6f9"} Dec 08 21:32:24 crc kubenswrapper[4791]: I1208 21:32:24.195563 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f5zqd6" event={"ID":"0bb70a12-b198-4377-906b-8036ff49d91c","Type":"ContainerStarted","Data":"38d5a4792d1da6c3ad6034fe4c7f50f9f3480088081d06e335d8649860de4237"} Dec 08 21:32:24 crc kubenswrapper[4791]: I1208 21:32:24.234179 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8xpwb5" podStartSLOduration=7.070504648 podStartE2EDuration="9.234162668s" podCreationTimestamp="2025-12-08 21:32:15 +0000 UTC" firstStartedPulling="2025-12-08 21:32:19.148506548 +0000 UTC m=+815.847264893" lastFinishedPulling="2025-12-08 21:32:21.312164568 +0000 UTC m=+818.010922913" observedRunningTime="2025-12-08 21:32:24.230234151 +0000 UTC m=+820.928992496" watchObservedRunningTime="2025-12-08 21:32:24.234162668 +0000 UTC m=+820.932921013" Dec 08 21:32:25 crc kubenswrapper[4791]: I1208 21:32:25.203938 4791 generic.go:334] "Generic (PLEG): container finished" podID="a292e97f-d0e0-47ea-8f22-77915dd393f3" containerID="164238efc5641961158ae8472d26f94112bfe86d04631906aa1c8919a67ab6f9" exitCode=0 Dec 08 21:32:25 crc kubenswrapper[4791]: I1208 21:32:25.205550 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8xpwb5" event={"ID":"a292e97f-d0e0-47ea-8f22-77915dd393f3","Type":"ContainerDied","Data":"164238efc5641961158ae8472d26f94112bfe86d04631906aa1c8919a67ab6f9"} Dec 08 21:32:25 crc kubenswrapper[4791]: I1208 21:32:25.207831 4791 generic.go:334] "Generic (PLEG): container finished" podID="0bb70a12-b198-4377-906b-8036ff49d91c" containerID="38d5a4792d1da6c3ad6034fe4c7f50f9f3480088081d06e335d8649860de4237" exitCode=0 Dec 08 21:32:25 crc kubenswrapper[4791]: I1208 21:32:25.207888 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f5zqd6" event={"ID":"0bb70a12-b198-4377-906b-8036ff49d91c","Type":"ContainerDied","Data":"38d5a4792d1da6c3ad6034fe4c7f50f9f3480088081d06e335d8649860de4237"} Dec 08 21:32:26 crc kubenswrapper[4791]: I1208 21:32:26.219093 4791 generic.go:334] "Generic (PLEG): container finished" podID="0bb70a12-b198-4377-906b-8036ff49d91c" containerID="e5547543d3bf4f8daeccfba95f788aae22edfd1facdf3835880064b24f64b5ca" exitCode=0 Dec 08 21:32:26 crc kubenswrapper[4791]: I1208 21:32:26.219195 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f5zqd6" event={"ID":"0bb70a12-b198-4377-906b-8036ff49d91c","Type":"ContainerDied","Data":"e5547543d3bf4f8daeccfba95f788aae22edfd1facdf3835880064b24f64b5ca"} Dec 08 21:32:26 crc kubenswrapper[4791]: I1208 21:32:26.447898 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8xpwb5" Dec 08 21:32:26 crc kubenswrapper[4791]: I1208 21:32:26.567982 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a292e97f-d0e0-47ea-8f22-77915dd393f3-util\") pod \"a292e97f-d0e0-47ea-8f22-77915dd393f3\" (UID: \"a292e97f-d0e0-47ea-8f22-77915dd393f3\") " Dec 08 21:32:26 crc kubenswrapper[4791]: I1208 21:32:26.568058 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a292e97f-d0e0-47ea-8f22-77915dd393f3-bundle\") pod \"a292e97f-d0e0-47ea-8f22-77915dd393f3\" (UID: \"a292e97f-d0e0-47ea-8f22-77915dd393f3\") " Dec 08 21:32:26 crc kubenswrapper[4791]: I1208 21:32:26.568093 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wk7sf\" (UniqueName: \"kubernetes.io/projected/a292e97f-d0e0-47ea-8f22-77915dd393f3-kube-api-access-wk7sf\") pod \"a292e97f-d0e0-47ea-8f22-77915dd393f3\" (UID: \"a292e97f-d0e0-47ea-8f22-77915dd393f3\") " Dec 08 21:32:26 crc kubenswrapper[4791]: I1208 21:32:26.569339 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a292e97f-d0e0-47ea-8f22-77915dd393f3-bundle" (OuterVolumeSpecName: "bundle") pod "a292e97f-d0e0-47ea-8f22-77915dd393f3" (UID: "a292e97f-d0e0-47ea-8f22-77915dd393f3"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:32:26 crc kubenswrapper[4791]: I1208 21:32:26.573952 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a292e97f-d0e0-47ea-8f22-77915dd393f3-kube-api-access-wk7sf" (OuterVolumeSpecName: "kube-api-access-wk7sf") pod "a292e97f-d0e0-47ea-8f22-77915dd393f3" (UID: "a292e97f-d0e0-47ea-8f22-77915dd393f3"). InnerVolumeSpecName "kube-api-access-wk7sf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:32:26 crc kubenswrapper[4791]: I1208 21:32:26.580137 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a292e97f-d0e0-47ea-8f22-77915dd393f3-util" (OuterVolumeSpecName: "util") pod "a292e97f-d0e0-47ea-8f22-77915dd393f3" (UID: "a292e97f-d0e0-47ea-8f22-77915dd393f3"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:32:26 crc kubenswrapper[4791]: I1208 21:32:26.669308 4791 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a292e97f-d0e0-47ea-8f22-77915dd393f3-util\") on node \"crc\" DevicePath \"\"" Dec 08 21:32:26 crc kubenswrapper[4791]: I1208 21:32:26.669828 4791 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a292e97f-d0e0-47ea-8f22-77915dd393f3-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:32:26 crc kubenswrapper[4791]: I1208 21:32:26.669916 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wk7sf\" (UniqueName: \"kubernetes.io/projected/a292e97f-d0e0-47ea-8f22-77915dd393f3-kube-api-access-wk7sf\") on node \"crc\" DevicePath \"\"" Dec 08 21:32:27 crc kubenswrapper[4791]: I1208 21:32:27.229291 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8xpwb5" event={"ID":"a292e97f-d0e0-47ea-8f22-77915dd393f3","Type":"ContainerDied","Data":"138f328c27a81ea587f31822218353e2e15578391858ad13aeb273231a71dee2"} Dec 08 21:32:27 crc kubenswrapper[4791]: I1208 21:32:27.231372 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="138f328c27a81ea587f31822218353e2e15578391858ad13aeb273231a71dee2" Dec 08 21:32:27 crc kubenswrapper[4791]: I1208 21:32:27.229361 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8xpwb5" Dec 08 21:32:27 crc kubenswrapper[4791]: I1208 21:32:27.449761 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f5zqd6" Dec 08 21:32:27 crc kubenswrapper[4791]: I1208 21:32:27.482947 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0bb70a12-b198-4377-906b-8036ff49d91c-bundle\") pod \"0bb70a12-b198-4377-906b-8036ff49d91c\" (UID: \"0bb70a12-b198-4377-906b-8036ff49d91c\") " Dec 08 21:32:27 crc kubenswrapper[4791]: I1208 21:32:27.482993 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0bb70a12-b198-4377-906b-8036ff49d91c-util\") pod \"0bb70a12-b198-4377-906b-8036ff49d91c\" (UID: \"0bb70a12-b198-4377-906b-8036ff49d91c\") " Dec 08 21:32:27 crc kubenswrapper[4791]: I1208 21:32:27.483077 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hf9qk\" (UniqueName: \"kubernetes.io/projected/0bb70a12-b198-4377-906b-8036ff49d91c-kube-api-access-hf9qk\") pod \"0bb70a12-b198-4377-906b-8036ff49d91c\" (UID: \"0bb70a12-b198-4377-906b-8036ff49d91c\") " Dec 08 21:32:27 crc kubenswrapper[4791]: I1208 21:32:27.484428 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0bb70a12-b198-4377-906b-8036ff49d91c-bundle" (OuterVolumeSpecName: "bundle") pod "0bb70a12-b198-4377-906b-8036ff49d91c" (UID: "0bb70a12-b198-4377-906b-8036ff49d91c"). InnerVolumeSpecName "bundle". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:32:27 crc kubenswrapper[4791]: I1208 21:32:27.491040 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0bb70a12-b198-4377-906b-8036ff49d91c-kube-api-access-hf9qk" (OuterVolumeSpecName: "kube-api-access-hf9qk") pod "0bb70a12-b198-4377-906b-8036ff49d91c" (UID: "0bb70a12-b198-4377-906b-8036ff49d91c"). InnerVolumeSpecName "kube-api-access-hf9qk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:32:27 crc kubenswrapper[4791]: I1208 21:32:27.495607 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0bb70a12-b198-4377-906b-8036ff49d91c-util" (OuterVolumeSpecName: "util") pod "0bb70a12-b198-4377-906b-8036ff49d91c" (UID: "0bb70a12-b198-4377-906b-8036ff49d91c"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:32:27 crc kubenswrapper[4791]: I1208 21:32:27.584612 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hf9qk\" (UniqueName: \"kubernetes.io/projected/0bb70a12-b198-4377-906b-8036ff49d91c-kube-api-access-hf9qk\") on node \"crc\" DevicePath \"\"" Dec 08 21:32:27 crc kubenswrapper[4791]: I1208 21:32:27.584657 4791 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0bb70a12-b198-4377-906b-8036ff49d91c-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:32:27 crc kubenswrapper[4791]: I1208 21:32:27.584667 4791 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0bb70a12-b198-4377-906b-8036ff49d91c-util\") on node \"crc\" DevicePath \"\"" Dec 08 21:32:28 crc kubenswrapper[4791]: I1208 21:32:28.236994 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f5zqd6" event={"ID":"0bb70a12-b198-4377-906b-8036ff49d91c","Type":"ContainerDied","Data":"6b98d8b2dbe116144036010669cd5a50743bb9ed33d49fd174802dff49e79b7c"} Dec 08 21:32:28 crc kubenswrapper[4791]: I1208 21:32:28.237035 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6b98d8b2dbe116144036010669cd5a50743bb9ed33d49fd174802dff49e79b7c" Dec 08 21:32:28 crc kubenswrapper[4791]: I1208 21:32:28.237078 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f5zqd6" Dec 08 21:32:31 crc kubenswrapper[4791]: I1208 21:32:31.717976 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/cluster-logging-operator-ff9846bd-qvvzh"] Dec 08 21:32:31 crc kubenswrapper[4791]: E1208 21:32:31.718501 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0bb70a12-b198-4377-906b-8036ff49d91c" containerName="pull" Dec 08 21:32:31 crc kubenswrapper[4791]: I1208 21:32:31.718514 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="0bb70a12-b198-4377-906b-8036ff49d91c" containerName="pull" Dec 08 21:32:31 crc kubenswrapper[4791]: E1208 21:32:31.718528 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d96ce78a-9e9b-4a5a-b536-3904872262e6" containerName="extract-utilities" Dec 08 21:32:31 crc kubenswrapper[4791]: I1208 21:32:31.718534 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="d96ce78a-9e9b-4a5a-b536-3904872262e6" containerName="extract-utilities" Dec 08 21:32:31 crc kubenswrapper[4791]: E1208 21:32:31.718547 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d96ce78a-9e9b-4a5a-b536-3904872262e6" containerName="extract-content" Dec 08 21:32:31 crc kubenswrapper[4791]: I1208 21:32:31.718553 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="d96ce78a-9e9b-4a5a-b536-3904872262e6" containerName="extract-content" Dec 08 21:32:31 crc kubenswrapper[4791]: E1208 21:32:31.718565 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a292e97f-d0e0-47ea-8f22-77915dd393f3" containerName="extract" Dec 08 21:32:31 crc kubenswrapper[4791]: I1208 21:32:31.718572 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="a292e97f-d0e0-47ea-8f22-77915dd393f3" containerName="extract" Dec 08 21:32:31 crc kubenswrapper[4791]: E1208 21:32:31.718587 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0bb70a12-b198-4377-906b-8036ff49d91c" containerName="extract" Dec 08 21:32:31 crc kubenswrapper[4791]: I1208 21:32:31.718592 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="0bb70a12-b198-4377-906b-8036ff49d91c" containerName="extract" Dec 08 21:32:31 crc kubenswrapper[4791]: E1208 21:32:31.718604 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0bb70a12-b198-4377-906b-8036ff49d91c" containerName="util" Dec 08 21:32:31 crc kubenswrapper[4791]: I1208 21:32:31.718610 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="0bb70a12-b198-4377-906b-8036ff49d91c" containerName="util" Dec 08 21:32:31 crc kubenswrapper[4791]: E1208 21:32:31.718617 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a292e97f-d0e0-47ea-8f22-77915dd393f3" containerName="util" Dec 08 21:32:31 crc kubenswrapper[4791]: I1208 21:32:31.718623 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="a292e97f-d0e0-47ea-8f22-77915dd393f3" containerName="util" Dec 08 21:32:31 crc kubenswrapper[4791]: E1208 21:32:31.718636 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d96ce78a-9e9b-4a5a-b536-3904872262e6" containerName="registry-server" Dec 08 21:32:31 crc kubenswrapper[4791]: I1208 21:32:31.718642 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="d96ce78a-9e9b-4a5a-b536-3904872262e6" containerName="registry-server" Dec 08 21:32:31 crc kubenswrapper[4791]: E1208 21:32:31.718653 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a292e97f-d0e0-47ea-8f22-77915dd393f3" 
containerName="pull" Dec 08 21:32:31 crc kubenswrapper[4791]: I1208 21:32:31.718658 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="a292e97f-d0e0-47ea-8f22-77915dd393f3" containerName="pull" Dec 08 21:32:31 crc kubenswrapper[4791]: I1208 21:32:31.718785 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="a292e97f-d0e0-47ea-8f22-77915dd393f3" containerName="extract" Dec 08 21:32:31 crc kubenswrapper[4791]: I1208 21:32:31.718795 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="0bb70a12-b198-4377-906b-8036ff49d91c" containerName="extract" Dec 08 21:32:31 crc kubenswrapper[4791]: I1208 21:32:31.718806 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="d96ce78a-9e9b-4a5a-b536-3904872262e6" containerName="registry-server" Dec 08 21:32:31 crc kubenswrapper[4791]: I1208 21:32:31.719265 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/cluster-logging-operator-ff9846bd-qvvzh" Dec 08 21:32:31 crc kubenswrapper[4791]: I1208 21:32:31.726072 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"openshift-service-ca.crt" Dec 08 21:32:31 crc kubenswrapper[4791]: I1208 21:32:31.726224 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"cluster-logging-operator-dockercfg-xdvkc" Dec 08 21:32:31 crc kubenswrapper[4791]: I1208 21:32:31.726539 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"kube-root-ca.crt" Dec 08 21:32:31 crc kubenswrapper[4791]: I1208 21:32:31.736343 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/cluster-logging-operator-ff9846bd-qvvzh"] Dec 08 21:32:31 crc kubenswrapper[4791]: I1208 21:32:31.772857 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rj98p\" (UniqueName: \"kubernetes.io/projected/1b8008a0-6001-445e-8da9-0c0f43ed3877-kube-api-access-rj98p\") pod \"cluster-logging-operator-ff9846bd-qvvzh\" (UID: \"1b8008a0-6001-445e-8da9-0c0f43ed3877\") " pod="openshift-logging/cluster-logging-operator-ff9846bd-qvvzh" Dec 08 21:32:31 crc kubenswrapper[4791]: I1208 21:32:31.874889 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rj98p\" (UniqueName: \"kubernetes.io/projected/1b8008a0-6001-445e-8da9-0c0f43ed3877-kube-api-access-rj98p\") pod \"cluster-logging-operator-ff9846bd-qvvzh\" (UID: \"1b8008a0-6001-445e-8da9-0c0f43ed3877\") " pod="openshift-logging/cluster-logging-operator-ff9846bd-qvvzh" Dec 08 21:32:31 crc kubenswrapper[4791]: I1208 21:32:31.891891 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rj98p\" (UniqueName: \"kubernetes.io/projected/1b8008a0-6001-445e-8da9-0c0f43ed3877-kube-api-access-rj98p\") pod \"cluster-logging-operator-ff9846bd-qvvzh\" (UID: \"1b8008a0-6001-445e-8da9-0c0f43ed3877\") " pod="openshift-logging/cluster-logging-operator-ff9846bd-qvvzh" Dec 08 21:32:32 crc kubenswrapper[4791]: I1208 21:32:32.034489 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/cluster-logging-operator-ff9846bd-qvvzh" Dec 08 21:32:32 crc kubenswrapper[4791]: I1208 21:32:32.259759 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/cluster-logging-operator-ff9846bd-qvvzh"] Dec 08 21:32:32 crc kubenswrapper[4791]: W1208 21:32:32.265281 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1b8008a0_6001_445e_8da9_0c0f43ed3877.slice/crio-b82086ef16e3c9666444f6b7223f76c0cced8cc8b02e48e40e1130c13eafea0b WatchSource:0}: Error finding container b82086ef16e3c9666444f6b7223f76c0cced8cc8b02e48e40e1130c13eafea0b: Status 404 returned error can't find the container with id b82086ef16e3c9666444f6b7223f76c0cced8cc8b02e48e40e1130c13eafea0b Dec 08 21:32:33 crc kubenswrapper[4791]: I1208 21:32:33.267380 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/cluster-logging-operator-ff9846bd-qvvzh" event={"ID":"1b8008a0-6001-445e-8da9-0c0f43ed3877","Type":"ContainerStarted","Data":"b82086ef16e3c9666444f6b7223f76c0cced8cc8b02e48e40e1130c13eafea0b"} Dec 08 21:32:40 crc kubenswrapper[4791]: I1208 21:32:40.324133 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/cluster-logging-operator-ff9846bd-qvvzh" event={"ID":"1b8008a0-6001-445e-8da9-0c0f43ed3877","Type":"ContainerStarted","Data":"b0517aa73b8459e677b84935894339084f38957e669647224ec840b08fdf0c73"} Dec 08 21:32:40 crc kubenswrapper[4791]: I1208 21:32:40.344532 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/cluster-logging-operator-ff9846bd-qvvzh" podStartSLOduration=2.054524231 podStartE2EDuration="9.344504926s" podCreationTimestamp="2025-12-08 21:32:31 +0000 UTC" firstStartedPulling="2025-12-08 21:32:32.267151735 +0000 UTC m=+828.965910080" lastFinishedPulling="2025-12-08 21:32:39.55713243 +0000 UTC m=+836.255890775" observedRunningTime="2025-12-08 21:32:40.340313153 +0000 UTC m=+837.039071498" watchObservedRunningTime="2025-12-08 21:32:40.344504926 +0000 UTC m=+837.043263271" Dec 08 21:32:43 crc kubenswrapper[4791]: I1208 21:32:43.122519 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-7b4df4946c-d67zd"] Dec 08 21:32:43 crc kubenswrapper[4791]: I1208 21:32:43.123798 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators-redhat/loki-operator-controller-manager-7b4df4946c-d67zd" Dec 08 21:32:43 crc kubenswrapper[4791]: I1208 21:32:43.130396 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-controller-manager-dockercfg-8585j" Dec 08 21:32:43 crc kubenswrapper[4791]: I1208 21:32:43.130734 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"kube-root-ca.crt" Dec 08 21:32:43 crc kubenswrapper[4791]: I1208 21:32:43.131415 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"loki-operator-manager-config" Dec 08 21:32:43 crc kubenswrapper[4791]: I1208 21:32:43.132114 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"openshift-service-ca.crt" Dec 08 21:32:43 crc kubenswrapper[4791]: I1208 21:32:43.132161 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-controller-manager-service-cert" Dec 08 21:32:43 crc kubenswrapper[4791]: I1208 21:32:43.132303 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-metrics" Dec 08 21:32:43 crc kubenswrapper[4791]: I1208 21:32:43.149867 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-7b4df4946c-d67zd"] Dec 08 21:32:43 crc kubenswrapper[4791]: I1208 21:32:43.291057 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/0c11badb-ae6f-4efe-9d82-80545108b777-manager-config\") pod \"loki-operator-controller-manager-7b4df4946c-d67zd\" (UID: \"0c11badb-ae6f-4efe-9d82-80545108b777\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7b4df4946c-d67zd" Dec 08 21:32:43 crc kubenswrapper[4791]: I1208 21:32:43.291112 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0c11badb-ae6f-4efe-9d82-80545108b777-webhook-cert\") pod \"loki-operator-controller-manager-7b4df4946c-d67zd\" (UID: \"0c11badb-ae6f-4efe-9d82-80545108b777\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7b4df4946c-d67zd" Dec 08 21:32:43 crc kubenswrapper[4791]: I1208 21:32:43.291163 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0c11badb-ae6f-4efe-9d82-80545108b777-apiservice-cert\") pod \"loki-operator-controller-manager-7b4df4946c-d67zd\" (UID: \"0c11badb-ae6f-4efe-9d82-80545108b777\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7b4df4946c-d67zd" Dec 08 21:32:43 crc kubenswrapper[4791]: I1208 21:32:43.291254 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6tl6f\" (UniqueName: \"kubernetes.io/projected/0c11badb-ae6f-4efe-9d82-80545108b777-kube-api-access-6tl6f\") pod \"loki-operator-controller-manager-7b4df4946c-d67zd\" (UID: \"0c11badb-ae6f-4efe-9d82-80545108b777\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7b4df4946c-d67zd" Dec 08 21:32:43 crc kubenswrapper[4791]: I1208 21:32:43.291281 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"loki-operator-metrics-cert\" (UniqueName: 
\"kubernetes.io/secret/0c11badb-ae6f-4efe-9d82-80545108b777-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-7b4df4946c-d67zd\" (UID: \"0c11badb-ae6f-4efe-9d82-80545108b777\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7b4df4946c-d67zd" Dec 08 21:32:43 crc kubenswrapper[4791]: I1208 21:32:43.393511 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6tl6f\" (UniqueName: \"kubernetes.io/projected/0c11badb-ae6f-4efe-9d82-80545108b777-kube-api-access-6tl6f\") pod \"loki-operator-controller-manager-7b4df4946c-d67zd\" (UID: \"0c11badb-ae6f-4efe-9d82-80545108b777\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7b4df4946c-d67zd" Dec 08 21:32:43 crc kubenswrapper[4791]: I1208 21:32:43.393559 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"loki-operator-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0c11badb-ae6f-4efe-9d82-80545108b777-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-7b4df4946c-d67zd\" (UID: \"0c11badb-ae6f-4efe-9d82-80545108b777\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7b4df4946c-d67zd" Dec 08 21:32:43 crc kubenswrapper[4791]: I1208 21:32:43.393591 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/0c11badb-ae6f-4efe-9d82-80545108b777-manager-config\") pod \"loki-operator-controller-manager-7b4df4946c-d67zd\" (UID: \"0c11badb-ae6f-4efe-9d82-80545108b777\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7b4df4946c-d67zd" Dec 08 21:32:43 crc kubenswrapper[4791]: I1208 21:32:43.393614 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0c11badb-ae6f-4efe-9d82-80545108b777-webhook-cert\") pod \"loki-operator-controller-manager-7b4df4946c-d67zd\" (UID: \"0c11badb-ae6f-4efe-9d82-80545108b777\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7b4df4946c-d67zd" Dec 08 21:32:43 crc kubenswrapper[4791]: I1208 21:32:43.393656 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0c11badb-ae6f-4efe-9d82-80545108b777-apiservice-cert\") pod \"loki-operator-controller-manager-7b4df4946c-d67zd\" (UID: \"0c11badb-ae6f-4efe-9d82-80545108b777\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7b4df4946c-d67zd" Dec 08 21:32:43 crc kubenswrapper[4791]: I1208 21:32:43.395398 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/0c11badb-ae6f-4efe-9d82-80545108b777-manager-config\") pod \"loki-operator-controller-manager-7b4df4946c-d67zd\" (UID: \"0c11badb-ae6f-4efe-9d82-80545108b777\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7b4df4946c-d67zd" Dec 08 21:32:43 crc kubenswrapper[4791]: I1208 21:32:43.410820 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"loki-operator-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0c11badb-ae6f-4efe-9d82-80545108b777-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-7b4df4946c-d67zd\" (UID: \"0c11badb-ae6f-4efe-9d82-80545108b777\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7b4df4946c-d67zd" Dec 08 21:32:43 crc kubenswrapper[4791]: I1208 21:32:43.413459 4791 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0c11badb-ae6f-4efe-9d82-80545108b777-webhook-cert\") pod \"loki-operator-controller-manager-7b4df4946c-d67zd\" (UID: \"0c11badb-ae6f-4efe-9d82-80545108b777\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7b4df4946c-d67zd" Dec 08 21:32:43 crc kubenswrapper[4791]: I1208 21:32:43.414216 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0c11badb-ae6f-4efe-9d82-80545108b777-apiservice-cert\") pod \"loki-operator-controller-manager-7b4df4946c-d67zd\" (UID: \"0c11badb-ae6f-4efe-9d82-80545108b777\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7b4df4946c-d67zd" Dec 08 21:32:43 crc kubenswrapper[4791]: I1208 21:32:43.439446 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6tl6f\" (UniqueName: \"kubernetes.io/projected/0c11badb-ae6f-4efe-9d82-80545108b777-kube-api-access-6tl6f\") pod \"loki-operator-controller-manager-7b4df4946c-d67zd\" (UID: \"0c11badb-ae6f-4efe-9d82-80545108b777\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7b4df4946c-d67zd" Dec 08 21:32:43 crc kubenswrapper[4791]: I1208 21:32:43.443072 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators-redhat/loki-operator-controller-manager-7b4df4946c-d67zd" Dec 08 21:32:44 crc kubenswrapper[4791]: I1208 21:32:44.072913 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-7b4df4946c-d67zd"] Dec 08 21:32:44 crc kubenswrapper[4791]: W1208 21:32:44.079523 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0c11badb_ae6f_4efe_9d82_80545108b777.slice/crio-005ebf4eb11437bfa6fc0cb6b044086c3c5d5c42600bc5deba3c926bbaa4e71d WatchSource:0}: Error finding container 005ebf4eb11437bfa6fc0cb6b044086c3c5d5c42600bc5deba3c926bbaa4e71d: Status 404 returned error can't find the container with id 005ebf4eb11437bfa6fc0cb6b044086c3c5d5c42600bc5deba3c926bbaa4e71d Dec 08 21:32:44 crc kubenswrapper[4791]: I1208 21:32:44.355933 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-7b4df4946c-d67zd" event={"ID":"0c11badb-ae6f-4efe-9d82-80545108b777","Type":"ContainerStarted","Data":"005ebf4eb11437bfa6fc0cb6b044086c3c5d5c42600bc5deba3c926bbaa4e71d"} Dec 08 21:32:48 crc kubenswrapper[4791]: I1208 21:32:48.383558 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-7b4df4946c-d67zd" event={"ID":"0c11badb-ae6f-4efe-9d82-80545108b777","Type":"ContainerStarted","Data":"3b6333d6eac1b39d9d3001c5c1366e800e7548d0c109bd905b7397463eed6193"} Dec 08 21:32:54 crc kubenswrapper[4791]: I1208 21:32:54.431250 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-7b4df4946c-d67zd" event={"ID":"0c11badb-ae6f-4efe-9d82-80545108b777","Type":"ContainerStarted","Data":"2b4975e3266ad98b2a99746f6e98b07a43023347b8111d02feda9aca52006eb7"} Dec 08 21:32:54 crc kubenswrapper[4791]: I1208 21:32:54.431934 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators-redhat/loki-operator-controller-manager-7b4df4946c-d67zd" Dec 08 21:32:54 crc kubenswrapper[4791]: I1208 21:32:54.441694 4791 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators-redhat/loki-operator-controller-manager-7b4df4946c-d67zd" Dec 08 21:32:54 crc kubenswrapper[4791]: I1208 21:32:54.461560 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators-redhat/loki-operator-controller-manager-7b4df4946c-d67zd" podStartSLOduration=1.495696321 podStartE2EDuration="11.461542966s" podCreationTimestamp="2025-12-08 21:32:43 +0000 UTC" firstStartedPulling="2025-12-08 21:32:44.082954899 +0000 UTC m=+840.781713244" lastFinishedPulling="2025-12-08 21:32:54.048801554 +0000 UTC m=+850.747559889" observedRunningTime="2025-12-08 21:32:54.460503801 +0000 UTC m=+851.159262146" watchObservedRunningTime="2025-12-08 21:32:54.461542966 +0000 UTC m=+851.160301311" Dec 08 21:33:00 crc kubenswrapper[4791]: I1208 21:33:00.489811 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["minio-dev/minio"] Dec 08 21:33:00 crc kubenswrapper[4791]: I1208 21:33:00.492157 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="minio-dev/minio" Dec 08 21:33:00 crc kubenswrapper[4791]: I1208 21:33:00.495404 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"minio-dev"/"kube-root-ca.crt" Dec 08 21:33:00 crc kubenswrapper[4791]: I1208 21:33:00.496014 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"minio-dev"/"openshift-service-ca.crt" Dec 08 21:33:00 crc kubenswrapper[4791]: I1208 21:33:00.496490 4791 reflector.go:368] Caches populated for *v1.Secret from object-"minio-dev"/"default-dockercfg-vln9w" Dec 08 21:33:00 crc kubenswrapper[4791]: I1208 21:33:00.498284 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["minio-dev/minio"] Dec 08 21:33:00 crc kubenswrapper[4791]: I1208 21:33:00.569649 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sc9tn\" (UniqueName: \"kubernetes.io/projected/33fbb737-dfb2-4d55-b6d8-75ac0aed5493-kube-api-access-sc9tn\") pod \"minio\" (UID: \"33fbb737-dfb2-4d55-b6d8-75ac0aed5493\") " pod="minio-dev/minio" Dec 08 21:33:00 crc kubenswrapper[4791]: I1208 21:33:00.569830 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-0be5cd39-0f01-4c8c-ad0b-df9e402ac7b8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0be5cd39-0f01-4c8c-ad0b-df9e402ac7b8\") pod \"minio\" (UID: \"33fbb737-dfb2-4d55-b6d8-75ac0aed5493\") " pod="minio-dev/minio" Dec 08 21:33:00 crc kubenswrapper[4791]: I1208 21:33:00.671944 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-0be5cd39-0f01-4c8c-ad0b-df9e402ac7b8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0be5cd39-0f01-4c8c-ad0b-df9e402ac7b8\") pod \"minio\" (UID: \"33fbb737-dfb2-4d55-b6d8-75ac0aed5493\") " pod="minio-dev/minio" Dec 08 21:33:00 crc kubenswrapper[4791]: I1208 21:33:00.672116 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sc9tn\" (UniqueName: \"kubernetes.io/projected/33fbb737-dfb2-4d55-b6d8-75ac0aed5493-kube-api-access-sc9tn\") pod \"minio\" (UID: \"33fbb737-dfb2-4d55-b6d8-75ac0aed5493\") " pod="minio-dev/minio" Dec 08 21:33:00 crc kubenswrapper[4791]: I1208 21:33:00.677088 4791 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 08 21:33:00 crc kubenswrapper[4791]: I1208 21:33:00.677126 4791 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-0be5cd39-0f01-4c8c-ad0b-df9e402ac7b8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0be5cd39-0f01-4c8c-ad0b-df9e402ac7b8\") pod \"minio\" (UID: \"33fbb737-dfb2-4d55-b6d8-75ac0aed5493\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/3258cf267719620fa6b2cea7debef78cc3f14e954e1451fa267109e6384310c4/globalmount\"" pod="minio-dev/minio" Dec 08 21:33:00 crc kubenswrapper[4791]: I1208 21:33:00.689889 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sc9tn\" (UniqueName: \"kubernetes.io/projected/33fbb737-dfb2-4d55-b6d8-75ac0aed5493-kube-api-access-sc9tn\") pod \"minio\" (UID: \"33fbb737-dfb2-4d55-b6d8-75ac0aed5493\") " pod="minio-dev/minio" Dec 08 21:33:00 crc kubenswrapper[4791]: I1208 21:33:00.701582 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-0be5cd39-0f01-4c8c-ad0b-df9e402ac7b8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0be5cd39-0f01-4c8c-ad0b-df9e402ac7b8\") pod \"minio\" (UID: \"33fbb737-dfb2-4d55-b6d8-75ac0aed5493\") " pod="minio-dev/minio" Dec 08 21:33:00 crc kubenswrapper[4791]: I1208 21:33:00.875606 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="minio-dev/minio" Dec 08 21:33:01 crc kubenswrapper[4791]: I1208 21:33:01.344605 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["minio-dev/minio"] Dec 08 21:33:01 crc kubenswrapper[4791]: I1208 21:33:01.476514 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="minio-dev/minio" event={"ID":"33fbb737-dfb2-4d55-b6d8-75ac0aed5493","Type":"ContainerStarted","Data":"e4ead41ed1b8ab0e3cfc822770ba096822c0e9b56f447ef47303bfad19090520"} Dec 08 21:33:16 crc kubenswrapper[4791]: I1208 21:33:16.566721 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="minio-dev/minio" event={"ID":"33fbb737-dfb2-4d55-b6d8-75ac0aed5493","Type":"ContainerStarted","Data":"1fc3efd33abe4dbf592a043bc236053bd6a0495c95294e58ddf62a4d09306fed"} Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.358673 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="minio-dev/minio" podStartSLOduration=8.797556298 podStartE2EDuration="23.358654812s" podCreationTimestamp="2025-12-08 21:32:58 +0000 UTC" firstStartedPulling="2025-12-08 21:33:01.363483279 +0000 UTC m=+858.062241624" lastFinishedPulling="2025-12-08 21:33:15.924581783 +0000 UTC m=+872.623340138" observedRunningTime="2025-12-08 21:33:16.587866976 +0000 UTC m=+873.286625321" watchObservedRunningTime="2025-12-08 21:33:21.358654812 +0000 UTC m=+878.057413157" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.360981 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/logging-loki-distributor-76cc67bf56-nbw68"] Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.362045 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/logging-loki-distributor-76cc67bf56-nbw68" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.364357 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-distributor-http" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.364473 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"logging-loki-ca-bundle" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.364784 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-distributor-grpc" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.364824 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"logging-loki-config" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.364908 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-dockercfg-xpdlc" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.369921 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-distributor-76cc67bf56-nbw68"] Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.489948 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-distributor-grpc\" (UniqueName: \"kubernetes.io/secret/ca262116-be5e-42bb-b68e-5d96c476628a-logging-loki-distributor-grpc\") pod \"logging-loki-distributor-76cc67bf56-nbw68\" (UID: \"ca262116-be5e-42bb-b68e-5d96c476628a\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-nbw68" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.490667 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ca262116-be5e-42bb-b68e-5d96c476628a-logging-loki-ca-bundle\") pod \"logging-loki-distributor-76cc67bf56-nbw68\" (UID: \"ca262116-be5e-42bb-b68e-5d96c476628a\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-nbw68" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.491139 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-distributor-http\" (UniqueName: \"kubernetes.io/secret/ca262116-be5e-42bb-b68e-5d96c476628a-logging-loki-distributor-http\") pod \"logging-loki-distributor-76cc67bf56-nbw68\" (UID: \"ca262116-be5e-42bb-b68e-5d96c476628a\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-nbw68" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.491260 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9brmr\" (UniqueName: \"kubernetes.io/projected/ca262116-be5e-42bb-b68e-5d96c476628a-kube-api-access-9brmr\") pod \"logging-loki-distributor-76cc67bf56-nbw68\" (UID: \"ca262116-be5e-42bb-b68e-5d96c476628a\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-nbw68" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.491345 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca262116-be5e-42bb-b68e-5d96c476628a-config\") pod \"logging-loki-distributor-76cc67bf56-nbw68\" (UID: \"ca262116-be5e-42bb-b68e-5d96c476628a\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-nbw68" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.531254 4791 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openshift-logging/logging-loki-querier-5895d59bb8-jbrkm"] Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.532264 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-querier-5895d59bb8-jbrkm" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.536819 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-querier-grpc" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.537254 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-querier-http" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.543891 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-s3" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.558720 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-querier-5895d59bb8-jbrkm"] Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.592487 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ca262116-be5e-42bb-b68e-5d96c476628a-logging-loki-ca-bundle\") pod \"logging-loki-distributor-76cc67bf56-nbw68\" (UID: \"ca262116-be5e-42bb-b68e-5d96c476628a\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-nbw68" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.592535 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-distributor-grpc\" (UniqueName: \"kubernetes.io/secret/ca262116-be5e-42bb-b68e-5d96c476628a-logging-loki-distributor-grpc\") pod \"logging-loki-distributor-76cc67bf56-nbw68\" (UID: \"ca262116-be5e-42bb-b68e-5d96c476628a\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-nbw68" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.592599 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-distributor-http\" (UniqueName: \"kubernetes.io/secret/ca262116-be5e-42bb-b68e-5d96c476628a-logging-loki-distributor-http\") pod \"logging-loki-distributor-76cc67bf56-nbw68\" (UID: \"ca262116-be5e-42bb-b68e-5d96c476628a\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-nbw68" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.592650 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9brmr\" (UniqueName: \"kubernetes.io/projected/ca262116-be5e-42bb-b68e-5d96c476628a-kube-api-access-9brmr\") pod \"logging-loki-distributor-76cc67bf56-nbw68\" (UID: \"ca262116-be5e-42bb-b68e-5d96c476628a\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-nbw68" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.592678 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca262116-be5e-42bb-b68e-5d96c476628a-config\") pod \"logging-loki-distributor-76cc67bf56-nbw68\" (UID: \"ca262116-be5e-42bb-b68e-5d96c476628a\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-nbw68" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.595641 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ca262116-be5e-42bb-b68e-5d96c476628a-logging-loki-ca-bundle\") pod \"logging-loki-distributor-76cc67bf56-nbw68\" (UID: \"ca262116-be5e-42bb-b68e-5d96c476628a\") " 
pod="openshift-logging/logging-loki-distributor-76cc67bf56-nbw68" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.596505 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca262116-be5e-42bb-b68e-5d96c476628a-config\") pod \"logging-loki-distributor-76cc67bf56-nbw68\" (UID: \"ca262116-be5e-42bb-b68e-5d96c476628a\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-nbw68" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.603282 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-distributor-grpc\" (UniqueName: \"kubernetes.io/secret/ca262116-be5e-42bb-b68e-5d96c476628a-logging-loki-distributor-grpc\") pod \"logging-loki-distributor-76cc67bf56-nbw68\" (UID: \"ca262116-be5e-42bb-b68e-5d96c476628a\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-nbw68" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.603603 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-distributor-http\" (UniqueName: \"kubernetes.io/secret/ca262116-be5e-42bb-b68e-5d96c476628a-logging-loki-distributor-http\") pod \"logging-loki-distributor-76cc67bf56-nbw68\" (UID: \"ca262116-be5e-42bb-b68e-5d96c476628a\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-nbw68" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.625518 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/logging-loki-query-frontend-84558f7c9f-qrzd8"] Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.626937 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-qrzd8" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.628295 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9brmr\" (UniqueName: \"kubernetes.io/projected/ca262116-be5e-42bb-b68e-5d96c476628a-kube-api-access-9brmr\") pod \"logging-loki-distributor-76cc67bf56-nbw68\" (UID: \"ca262116-be5e-42bb-b68e-5d96c476628a\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-nbw68" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.631673 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-query-frontend-grpc" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.631922 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-query-frontend-http" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.640973 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-query-frontend-84558f7c9f-qrzd8"] Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.682528 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/logging-loki-distributor-76cc67bf56-nbw68" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.696483 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/02e28803-31d3-4093-892d-cb6ae8ca37a0-logging-loki-ca-bundle\") pod \"logging-loki-querier-5895d59bb8-jbrkm\" (UID: \"02e28803-31d3-4093-892d-cb6ae8ca37a0\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-jbrkm" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.696551 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rwfp8\" (UniqueName: \"kubernetes.io/projected/02e28803-31d3-4093-892d-cb6ae8ca37a0-kube-api-access-rwfp8\") pod \"logging-loki-querier-5895d59bb8-jbrkm\" (UID: \"02e28803-31d3-4093-892d-cb6ae8ca37a0\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-jbrkm" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.696580 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-querier-http\" (UniqueName: \"kubernetes.io/secret/02e28803-31d3-4093-892d-cb6ae8ca37a0-logging-loki-querier-http\") pod \"logging-loki-querier-5895d59bb8-jbrkm\" (UID: \"02e28803-31d3-4093-892d-cb6ae8ca37a0\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-jbrkm" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.696661 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-querier-grpc\" (UniqueName: \"kubernetes.io/secret/02e28803-31d3-4093-892d-cb6ae8ca37a0-logging-loki-querier-grpc\") pod \"logging-loki-querier-5895d59bb8-jbrkm\" (UID: \"02e28803-31d3-4093-892d-cb6ae8ca37a0\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-jbrkm" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.696726 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/02e28803-31d3-4093-892d-cb6ae8ca37a0-config\") pod \"logging-loki-querier-5895d59bb8-jbrkm\" (UID: \"02e28803-31d3-4093-892d-cb6ae8ca37a0\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-jbrkm" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.696755 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/02e28803-31d3-4093-892d-cb6ae8ca37a0-logging-loki-s3\") pod \"logging-loki-querier-5895d59bb8-jbrkm\" (UID: \"02e28803-31d3-4093-892d-cb6ae8ca37a0\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-jbrkm" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.798010 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-query-frontend-grpc\" (UniqueName: \"kubernetes.io/secret/544545f2-67f8-4bb2-8287-644c13874f93-logging-loki-query-frontend-grpc\") pod \"logging-loki-query-frontend-84558f7c9f-qrzd8\" (UID: \"544545f2-67f8-4bb2-8287-644c13874f93\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-qrzd8" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.798058 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/544545f2-67f8-4bb2-8287-644c13874f93-logging-loki-ca-bundle\") pod 
\"logging-loki-query-frontend-84558f7c9f-qrzd8\" (UID: \"544545f2-67f8-4bb2-8287-644c13874f93\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-qrzd8" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.798091 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/02e28803-31d3-4093-892d-cb6ae8ca37a0-logging-loki-ca-bundle\") pod \"logging-loki-querier-5895d59bb8-jbrkm\" (UID: \"02e28803-31d3-4093-892d-cb6ae8ca37a0\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-jbrkm" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.798114 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rwfp8\" (UniqueName: \"kubernetes.io/projected/02e28803-31d3-4093-892d-cb6ae8ca37a0-kube-api-access-rwfp8\") pod \"logging-loki-querier-5895d59bb8-jbrkm\" (UID: \"02e28803-31d3-4093-892d-cb6ae8ca37a0\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-jbrkm" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.798133 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-querier-http\" (UniqueName: \"kubernetes.io/secret/02e28803-31d3-4093-892d-cb6ae8ca37a0-logging-loki-querier-http\") pod \"logging-loki-querier-5895d59bb8-jbrkm\" (UID: \"02e28803-31d3-4093-892d-cb6ae8ca37a0\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-jbrkm" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.798189 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-querier-grpc\" (UniqueName: \"kubernetes.io/secret/02e28803-31d3-4093-892d-cb6ae8ca37a0-logging-loki-querier-grpc\") pod \"logging-loki-querier-5895d59bb8-jbrkm\" (UID: \"02e28803-31d3-4093-892d-cb6ae8ca37a0\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-jbrkm" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.798216 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-query-frontend-http\" (UniqueName: \"kubernetes.io/secret/544545f2-67f8-4bb2-8287-644c13874f93-logging-loki-query-frontend-http\") pod \"logging-loki-query-frontend-84558f7c9f-qrzd8\" (UID: \"544545f2-67f8-4bb2-8287-644c13874f93\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-qrzd8" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.798234 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmdqx\" (UniqueName: \"kubernetes.io/projected/544545f2-67f8-4bb2-8287-644c13874f93-kube-api-access-dmdqx\") pod \"logging-loki-query-frontend-84558f7c9f-qrzd8\" (UID: \"544545f2-67f8-4bb2-8287-644c13874f93\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-qrzd8" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.798256 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/02e28803-31d3-4093-892d-cb6ae8ca37a0-config\") pod \"logging-loki-querier-5895d59bb8-jbrkm\" (UID: \"02e28803-31d3-4093-892d-cb6ae8ca37a0\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-jbrkm" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.798273 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/02e28803-31d3-4093-892d-cb6ae8ca37a0-logging-loki-s3\") pod \"logging-loki-querier-5895d59bb8-jbrkm\" (UID: 
\"02e28803-31d3-4093-892d-cb6ae8ca37a0\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-jbrkm" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.798267 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/logging-loki-gateway-868b848d6f-twnll"] Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.799221 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/02e28803-31d3-4093-892d-cb6ae8ca37a0-logging-loki-ca-bundle\") pod \"logging-loki-querier-5895d59bb8-jbrkm\" (UID: \"02e28803-31d3-4093-892d-cb6ae8ca37a0\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-jbrkm" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.798288 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/544545f2-67f8-4bb2-8287-644c13874f93-config\") pod \"logging-loki-query-frontend-84558f7c9f-qrzd8\" (UID: \"544545f2-67f8-4bb2-8287-644c13874f93\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-qrzd8" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.799480 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-gateway-868b848d6f-twnll" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.800943 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/02e28803-31d3-4093-892d-cb6ae8ca37a0-config\") pod \"logging-loki-querier-5895d59bb8-jbrkm\" (UID: \"02e28803-31d3-4093-892d-cb6ae8ca37a0\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-jbrkm" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.804089 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-querier-grpc\" (UniqueName: \"kubernetes.io/secret/02e28803-31d3-4093-892d-cb6ae8ca37a0-logging-loki-querier-grpc\") pod \"logging-loki-querier-5895d59bb8-jbrkm\" (UID: \"02e28803-31d3-4093-892d-cb6ae8ca37a0\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-jbrkm" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.807797 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"logging-loki-gateway-ca-bundle" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.807872 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-gateway" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.807914 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-gateway-http" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.807868 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"logging-loki-gateway" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.807796 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-querier-http\" (UniqueName: \"kubernetes.io/secret/02e28803-31d3-4093-892d-cb6ae8ca37a0-logging-loki-querier-http\") pod \"logging-loki-querier-5895d59bb8-jbrkm\" (UID: \"02e28803-31d3-4093-892d-cb6ae8ca37a0\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-jbrkm" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.809697 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-gateway-client-http" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.811531 4791 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/02e28803-31d3-4093-892d-cb6ae8ca37a0-logging-loki-s3\") pod \"logging-loki-querier-5895d59bb8-jbrkm\" (UID: \"02e28803-31d3-4093-892d-cb6ae8ca37a0\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-jbrkm" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.849925 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rwfp8\" (UniqueName: \"kubernetes.io/projected/02e28803-31d3-4093-892d-cb6ae8ca37a0-kube-api-access-rwfp8\") pod \"logging-loki-querier-5895d59bb8-jbrkm\" (UID: \"02e28803-31d3-4093-892d-cb6ae8ca37a0\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-jbrkm" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.853982 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-gateway-868b848d6f-twnll"] Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.862637 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/logging-loki-gateway-868b848d6f-mk4cx"] Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.864027 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-gateway-868b848d6f-mk4cx" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.885289 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-gateway-dockercfg-p46q9" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.900590 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/4d4e77a7-b71b-466f-8964-b1c4257c7c79-lokistack-gateway\") pod \"logging-loki-gateway-868b848d6f-twnll\" (UID: \"4d4e77a7-b71b-466f-8964-b1c4257c7c79\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-twnll" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.900912 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/4d4e77a7-b71b-466f-8964-b1c4257c7c79-logging-loki-gateway-client-http\") pod \"logging-loki-gateway-868b848d6f-twnll\" (UID: \"4d4e77a7-b71b-466f-8964-b1c4257c7c79\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-twnll" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.900940 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-query-frontend-http\" (UniqueName: \"kubernetes.io/secret/544545f2-67f8-4bb2-8287-644c13874f93-logging-loki-query-frontend-http\") pod \"logging-loki-query-frontend-84558f7c9f-qrzd8\" (UID: \"544545f2-67f8-4bb2-8287-644c13874f93\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-qrzd8" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.900961 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmdqx\" (UniqueName: \"kubernetes.io/projected/544545f2-67f8-4bb2-8287-644c13874f93-kube-api-access-dmdqx\") pod \"logging-loki-query-frontend-84558f7c9f-qrzd8\" (UID: \"544545f2-67f8-4bb2-8287-644c13874f93\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-qrzd8" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.900978 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nbx26\" (UniqueName: 
\"kubernetes.io/projected/4d4e77a7-b71b-466f-8964-b1c4257c7c79-kube-api-access-nbx26\") pod \"logging-loki-gateway-868b848d6f-twnll\" (UID: \"4d4e77a7-b71b-466f-8964-b1c4257c7c79\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-twnll" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.901000 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/544545f2-67f8-4bb2-8287-644c13874f93-config\") pod \"logging-loki-query-frontend-84558f7c9f-qrzd8\" (UID: \"544545f2-67f8-4bb2-8287-644c13874f93\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-qrzd8" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.901030 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/4d4e77a7-b71b-466f-8964-b1c4257c7c79-tenants\") pod \"logging-loki-gateway-868b848d6f-twnll\" (UID: \"4d4e77a7-b71b-466f-8964-b1c4257c7c79\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-twnll" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.901065 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-query-frontend-grpc\" (UniqueName: \"kubernetes.io/secret/544545f2-67f8-4bb2-8287-644c13874f93-logging-loki-query-frontend-grpc\") pod \"logging-loki-query-frontend-84558f7c9f-qrzd8\" (UID: \"544545f2-67f8-4bb2-8287-644c13874f93\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-qrzd8" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.901096 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/544545f2-67f8-4bb2-8287-644c13874f93-logging-loki-ca-bundle\") pod \"logging-loki-query-frontend-84558f7c9f-qrzd8\" (UID: \"544545f2-67f8-4bb2-8287-644c13874f93\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-qrzd8" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.901134 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4d4e77a7-b71b-466f-8964-b1c4257c7c79-logging-loki-gateway-ca-bundle\") pod \"logging-loki-gateway-868b848d6f-twnll\" (UID: \"4d4e77a7-b71b-466f-8964-b1c4257c7c79\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-twnll" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.901168 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/4d4e77a7-b71b-466f-8964-b1c4257c7c79-tls-secret\") pod \"logging-loki-gateway-868b848d6f-twnll\" (UID: \"4d4e77a7-b71b-466f-8964-b1c4257c7c79\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-twnll" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.901190 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/4d4e77a7-b71b-466f-8964-b1c4257c7c79-rbac\") pod \"logging-loki-gateway-868b848d6f-twnll\" (UID: \"4d4e77a7-b71b-466f-8964-b1c4257c7c79\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-twnll" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.901207 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/4d4e77a7-b71b-466f-8964-b1c4257c7c79-logging-loki-ca-bundle\") pod \"logging-loki-gateway-868b848d6f-twnll\" (UID: \"4d4e77a7-b71b-466f-8964-b1c4257c7c79\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-twnll" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.902604 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-gateway-868b848d6f-mk4cx"] Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.908589 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/544545f2-67f8-4bb2-8287-644c13874f93-logging-loki-ca-bundle\") pod \"logging-loki-query-frontend-84558f7c9f-qrzd8\" (UID: \"544545f2-67f8-4bb2-8287-644c13874f93\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-qrzd8" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.908836 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/544545f2-67f8-4bb2-8287-644c13874f93-config\") pod \"logging-loki-query-frontend-84558f7c9f-qrzd8\" (UID: \"544545f2-67f8-4bb2-8287-644c13874f93\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-qrzd8" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.913472 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-query-frontend-grpc\" (UniqueName: \"kubernetes.io/secret/544545f2-67f8-4bb2-8287-644c13874f93-logging-loki-query-frontend-grpc\") pod \"logging-loki-query-frontend-84558f7c9f-qrzd8\" (UID: \"544545f2-67f8-4bb2-8287-644c13874f93\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-qrzd8" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.925306 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-query-frontend-http\" (UniqueName: \"kubernetes.io/secret/544545f2-67f8-4bb2-8287-644c13874f93-logging-loki-query-frontend-http\") pod \"logging-loki-query-frontend-84558f7c9f-qrzd8\" (UID: \"544545f2-67f8-4bb2-8287-644c13874f93\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-qrzd8" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.960978 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmdqx\" (UniqueName: \"kubernetes.io/projected/544545f2-67f8-4bb2-8287-644c13874f93-kube-api-access-dmdqx\") pod \"logging-loki-query-frontend-84558f7c9f-qrzd8\" (UID: \"544545f2-67f8-4bb2-8287-644c13874f93\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-qrzd8" Dec 08 21:33:21 crc kubenswrapper[4791]: I1208 21:33:21.984789 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-qrzd8" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.044835 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n4wt6\" (UniqueName: \"kubernetes.io/projected/2d8aae18-ca02-4ba1-8b8f-ca028ccea24e-kube-api-access-n4wt6\") pod \"logging-loki-gateway-868b848d6f-mk4cx\" (UID: \"2d8aae18-ca02-4ba1-8b8f-ca028ccea24e\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-mk4cx" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.044907 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/4d4e77a7-b71b-466f-8964-b1c4257c7c79-logging-loki-gateway-client-http\") pod \"logging-loki-gateway-868b848d6f-twnll\" (UID: \"4d4e77a7-b71b-466f-8964-b1c4257c7c79\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-twnll" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.044940 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2d8aae18-ca02-4ba1-8b8f-ca028ccea24e-logging-loki-ca-bundle\") pod \"logging-loki-gateway-868b848d6f-mk4cx\" (UID: \"2d8aae18-ca02-4ba1-8b8f-ca028ccea24e\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-mk4cx" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.044978 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nbx26\" (UniqueName: \"kubernetes.io/projected/4d4e77a7-b71b-466f-8964-b1c4257c7c79-kube-api-access-nbx26\") pod \"logging-loki-gateway-868b848d6f-twnll\" (UID: \"4d4e77a7-b71b-466f-8964-b1c4257c7c79\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-twnll" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.045028 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/4d4e77a7-b71b-466f-8964-b1c4257c7c79-tenants\") pod \"logging-loki-gateway-868b848d6f-twnll\" (UID: \"4d4e77a7-b71b-466f-8964-b1c4257c7c79\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-twnll" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.045067 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/2d8aae18-ca02-4ba1-8b8f-ca028ccea24e-rbac\") pod \"logging-loki-gateway-868b848d6f-mk4cx\" (UID: \"2d8aae18-ca02-4ba1-8b8f-ca028ccea24e\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-mk4cx" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.045134 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/2d8aae18-ca02-4ba1-8b8f-ca028ccea24e-tls-secret\") pod \"logging-loki-gateway-868b848d6f-mk4cx\" (UID: \"2d8aae18-ca02-4ba1-8b8f-ca028ccea24e\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-mk4cx" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.045167 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/2d8aae18-ca02-4ba1-8b8f-ca028ccea24e-logging-loki-gateway-client-http\") pod \"logging-loki-gateway-868b848d6f-mk4cx\" (UID: \"2d8aae18-ca02-4ba1-8b8f-ca028ccea24e\") " 
pod="openshift-logging/logging-loki-gateway-868b848d6f-mk4cx" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.045259 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4d4e77a7-b71b-466f-8964-b1c4257c7c79-logging-loki-gateway-ca-bundle\") pod \"logging-loki-gateway-868b848d6f-twnll\" (UID: \"4d4e77a7-b71b-466f-8964-b1c4257c7c79\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-twnll" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.045302 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/2d8aae18-ca02-4ba1-8b8f-ca028ccea24e-lokistack-gateway\") pod \"logging-loki-gateway-868b848d6f-mk4cx\" (UID: \"2d8aae18-ca02-4ba1-8b8f-ca028ccea24e\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-mk4cx" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.045373 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/4d4e77a7-b71b-466f-8964-b1c4257c7c79-tls-secret\") pod \"logging-loki-gateway-868b848d6f-twnll\" (UID: \"4d4e77a7-b71b-466f-8964-b1c4257c7c79\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-twnll" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.045410 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/2d8aae18-ca02-4ba1-8b8f-ca028ccea24e-tenants\") pod \"logging-loki-gateway-868b848d6f-mk4cx\" (UID: \"2d8aae18-ca02-4ba1-8b8f-ca028ccea24e\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-mk4cx" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.045452 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/4d4e77a7-b71b-466f-8964-b1c4257c7c79-rbac\") pod \"logging-loki-gateway-868b848d6f-twnll\" (UID: \"4d4e77a7-b71b-466f-8964-b1c4257c7c79\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-twnll" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.045487 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4d4e77a7-b71b-466f-8964-b1c4257c7c79-logging-loki-ca-bundle\") pod \"logging-loki-gateway-868b848d6f-twnll\" (UID: \"4d4e77a7-b71b-466f-8964-b1c4257c7c79\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-twnll" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.045542 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2d8aae18-ca02-4ba1-8b8f-ca028ccea24e-logging-loki-gateway-ca-bundle\") pod \"logging-loki-gateway-868b848d6f-mk4cx\" (UID: \"2d8aae18-ca02-4ba1-8b8f-ca028ccea24e\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-mk4cx" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.045595 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/4d4e77a7-b71b-466f-8964-b1c4257c7c79-lokistack-gateway\") pod \"logging-loki-gateway-868b848d6f-twnll\" (UID: \"4d4e77a7-b71b-466f-8964-b1c4257c7c79\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-twnll" Dec 08 21:33:22 crc kubenswrapper[4791]: E1208 21:33:22.047298 4791 
secret.go:188] Couldn't get secret openshift-logging/logging-loki-gateway-http: secret "logging-loki-gateway-http" not found Dec 08 21:33:22 crc kubenswrapper[4791]: E1208 21:33:22.047414 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4d4e77a7-b71b-466f-8964-b1c4257c7c79-tls-secret podName:4d4e77a7-b71b-466f-8964-b1c4257c7c79 nodeName:}" failed. No retries permitted until 2025-12-08 21:33:22.547396189 +0000 UTC m=+879.246154534 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "tls-secret" (UniqueName: "kubernetes.io/secret/4d4e77a7-b71b-466f-8964-b1c4257c7c79-tls-secret") pod "logging-loki-gateway-868b848d6f-twnll" (UID: "4d4e77a7-b71b-466f-8964-b1c4257c7c79") : secret "logging-loki-gateway-http" not found Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.047428 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/4d4e77a7-b71b-466f-8964-b1c4257c7c79-lokistack-gateway\") pod \"logging-loki-gateway-868b848d6f-twnll\" (UID: \"4d4e77a7-b71b-466f-8964-b1c4257c7c79\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-twnll" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.048140 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/4d4e77a7-b71b-466f-8964-b1c4257c7c79-rbac\") pod \"logging-loki-gateway-868b848d6f-twnll\" (UID: \"4d4e77a7-b71b-466f-8964-b1c4257c7c79\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-twnll" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.048454 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4d4e77a7-b71b-466f-8964-b1c4257c7c79-logging-loki-ca-bundle\") pod \"logging-loki-gateway-868b848d6f-twnll\" (UID: \"4d4e77a7-b71b-466f-8964-b1c4257c7c79\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-twnll" Dec 08 21:33:22 crc kubenswrapper[4791]: E1208 21:33:22.048471 4791 configmap.go:193] Couldn't get configMap openshift-logging/logging-loki-gateway-ca-bundle: configmap "logging-loki-gateway-ca-bundle" not found Dec 08 21:33:22 crc kubenswrapper[4791]: E1208 21:33:22.048551 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4d4e77a7-b71b-466f-8964-b1c4257c7c79-logging-loki-gateway-ca-bundle podName:4d4e77a7-b71b-466f-8964-b1c4257c7c79 nodeName:}" failed. No retries permitted until 2025-12-08 21:33:22.548532107 +0000 UTC m=+879.247290452 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "logging-loki-gateway-ca-bundle" (UniqueName: "kubernetes.io/configmap/4d4e77a7-b71b-466f-8964-b1c4257c7c79-logging-loki-gateway-ca-bundle") pod "logging-loki-gateway-868b848d6f-twnll" (UID: "4d4e77a7-b71b-466f-8964-b1c4257c7c79") : configmap "logging-loki-gateway-ca-bundle" not found Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.057667 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/4d4e77a7-b71b-466f-8964-b1c4257c7c79-tenants\") pod \"logging-loki-gateway-868b848d6f-twnll\" (UID: \"4d4e77a7-b71b-466f-8964-b1c4257c7c79\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-twnll" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.062988 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/4d4e77a7-b71b-466f-8964-b1c4257c7c79-logging-loki-gateway-client-http\") pod \"logging-loki-gateway-868b848d6f-twnll\" (UID: \"4d4e77a7-b71b-466f-8964-b1c4257c7c79\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-twnll" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.107163 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nbx26\" (UniqueName: \"kubernetes.io/projected/4d4e77a7-b71b-466f-8964-b1c4257c7c79-kube-api-access-nbx26\") pod \"logging-loki-gateway-868b848d6f-twnll\" (UID: \"4d4e77a7-b71b-466f-8964-b1c4257c7c79\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-twnll" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.147163 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/2d8aae18-ca02-4ba1-8b8f-ca028ccea24e-lokistack-gateway\") pod \"logging-loki-gateway-868b848d6f-mk4cx\" (UID: \"2d8aae18-ca02-4ba1-8b8f-ca028ccea24e\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-mk4cx" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.147234 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/2d8aae18-ca02-4ba1-8b8f-ca028ccea24e-tenants\") pod \"logging-loki-gateway-868b848d6f-mk4cx\" (UID: \"2d8aae18-ca02-4ba1-8b8f-ca028ccea24e\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-mk4cx" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.147270 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2d8aae18-ca02-4ba1-8b8f-ca028ccea24e-logging-loki-gateway-ca-bundle\") pod \"logging-loki-gateway-868b848d6f-mk4cx\" (UID: \"2d8aae18-ca02-4ba1-8b8f-ca028ccea24e\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-mk4cx" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.147311 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n4wt6\" (UniqueName: \"kubernetes.io/projected/2d8aae18-ca02-4ba1-8b8f-ca028ccea24e-kube-api-access-n4wt6\") pod \"logging-loki-gateway-868b848d6f-mk4cx\" (UID: \"2d8aae18-ca02-4ba1-8b8f-ca028ccea24e\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-mk4cx" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.147338 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2d8aae18-ca02-4ba1-8b8f-ca028ccea24e-logging-loki-ca-bundle\") pod 
\"logging-loki-gateway-868b848d6f-mk4cx\" (UID: \"2d8aae18-ca02-4ba1-8b8f-ca028ccea24e\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-mk4cx" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.147382 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/2d8aae18-ca02-4ba1-8b8f-ca028ccea24e-rbac\") pod \"logging-loki-gateway-868b848d6f-mk4cx\" (UID: \"2d8aae18-ca02-4ba1-8b8f-ca028ccea24e\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-mk4cx" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.147438 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/2d8aae18-ca02-4ba1-8b8f-ca028ccea24e-tls-secret\") pod \"logging-loki-gateway-868b848d6f-mk4cx\" (UID: \"2d8aae18-ca02-4ba1-8b8f-ca028ccea24e\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-mk4cx" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.147462 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/2d8aae18-ca02-4ba1-8b8f-ca028ccea24e-logging-loki-gateway-client-http\") pod \"logging-loki-gateway-868b848d6f-mk4cx\" (UID: \"2d8aae18-ca02-4ba1-8b8f-ca028ccea24e\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-mk4cx" Dec 08 21:33:22 crc kubenswrapper[4791]: E1208 21:33:22.155034 4791 secret.go:188] Couldn't get secret openshift-logging/logging-loki-gateway-http: secret "logging-loki-gateway-http" not found Dec 08 21:33:22 crc kubenswrapper[4791]: E1208 21:33:22.155133 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2d8aae18-ca02-4ba1-8b8f-ca028ccea24e-tls-secret podName:2d8aae18-ca02-4ba1-8b8f-ca028ccea24e nodeName:}" failed. No retries permitted until 2025-12-08 21:33:22.655108264 +0000 UTC m=+879.353866609 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "tls-secret" (UniqueName: "kubernetes.io/secret/2d8aae18-ca02-4ba1-8b8f-ca028ccea24e-tls-secret") pod "logging-loki-gateway-868b848d6f-mk4cx" (UID: "2d8aae18-ca02-4ba1-8b8f-ca028ccea24e") : secret "logging-loki-gateway-http" not found Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.155245 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2d8aae18-ca02-4ba1-8b8f-ca028ccea24e-logging-loki-ca-bundle\") pod \"logging-loki-gateway-868b848d6f-mk4cx\" (UID: \"2d8aae18-ca02-4ba1-8b8f-ca028ccea24e\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-mk4cx" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.155260 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2d8aae18-ca02-4ba1-8b8f-ca028ccea24e-logging-loki-gateway-ca-bundle\") pod \"logging-loki-gateway-868b848d6f-mk4cx\" (UID: \"2d8aae18-ca02-4ba1-8b8f-ca028ccea24e\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-mk4cx" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.155303 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/2d8aae18-ca02-4ba1-8b8f-ca028ccea24e-lokistack-gateway\") pod \"logging-loki-gateway-868b848d6f-mk4cx\" (UID: \"2d8aae18-ca02-4ba1-8b8f-ca028ccea24e\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-mk4cx" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.155363 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/2d8aae18-ca02-4ba1-8b8f-ca028ccea24e-rbac\") pod \"logging-loki-gateway-868b848d6f-mk4cx\" (UID: \"2d8aae18-ca02-4ba1-8b8f-ca028ccea24e\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-mk4cx" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.155606 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/logging-loki-querier-5895d59bb8-jbrkm" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.157822 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/2d8aae18-ca02-4ba1-8b8f-ca028ccea24e-logging-loki-gateway-client-http\") pod \"logging-loki-gateway-868b848d6f-mk4cx\" (UID: \"2d8aae18-ca02-4ba1-8b8f-ca028ccea24e\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-mk4cx" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.162567 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/2d8aae18-ca02-4ba1-8b8f-ca028ccea24e-tenants\") pod \"logging-loki-gateway-868b848d6f-mk4cx\" (UID: \"2d8aae18-ca02-4ba1-8b8f-ca028ccea24e\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-mk4cx" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.173233 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n4wt6\" (UniqueName: \"kubernetes.io/projected/2d8aae18-ca02-4ba1-8b8f-ca028ccea24e-kube-api-access-n4wt6\") pod \"logging-loki-gateway-868b848d6f-mk4cx\" (UID: \"2d8aae18-ca02-4ba1-8b8f-ca028ccea24e\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-mk4cx" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.309454 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-query-frontend-84558f7c9f-qrzd8"] Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.387900 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-distributor-76cc67bf56-nbw68"] Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.513840 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/logging-loki-ingester-0"] Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.514924 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/logging-loki-ingester-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.516749 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-ingester-http" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.517897 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-ingester-grpc" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.529588 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-ingester-0"] Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.554336 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4d4e77a7-b71b-466f-8964-b1c4257c7c79-logging-loki-gateway-ca-bundle\") pod \"logging-loki-gateway-868b848d6f-twnll\" (UID: \"4d4e77a7-b71b-466f-8964-b1c4257c7c79\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-twnll" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.554397 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/4d4e77a7-b71b-466f-8964-b1c4257c7c79-tls-secret\") pod \"logging-loki-gateway-868b848d6f-twnll\" (UID: \"4d4e77a7-b71b-466f-8964-b1c4257c7c79\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-twnll" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.556048 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4d4e77a7-b71b-466f-8964-b1c4257c7c79-logging-loki-gateway-ca-bundle\") pod \"logging-loki-gateway-868b848d6f-twnll\" (UID: \"4d4e77a7-b71b-466f-8964-b1c4257c7c79\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-twnll" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.558764 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/4d4e77a7-b71b-466f-8964-b1c4257c7c79-tls-secret\") pod \"logging-loki-gateway-868b848d6f-twnll\" (UID: \"4d4e77a7-b71b-466f-8964-b1c4257c7c79\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-twnll" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.603090 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/logging-loki-compactor-0"] Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.604171 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/logging-loki-compactor-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.606283 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-compactor-http" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.607885 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-qrzd8" event={"ID":"544545f2-67f8-4bb2-8287-644c13874f93","Type":"ContainerStarted","Data":"f5a4dd40296c59cafa6124c21f7d7739d51a1a45f99dfe3f3c4332cfbd49d449"} Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.608137 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-compactor-grpc" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.609479 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-distributor-76cc67bf56-nbw68" event={"ID":"ca262116-be5e-42bb-b68e-5d96c476628a","Type":"ContainerStarted","Data":"6d22444eccbaf720fc6bfd6d8fb32b92ccb485c04f5b03bacc207058aa6e45fd"} Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.616254 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-compactor-0"] Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.655798 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j4fsz\" (UniqueName: \"kubernetes.io/projected/542dc8e1-9fec-4080-bd2d-8e51070f73a1-kube-api-access-j4fsz\") pod \"logging-loki-ingester-0\" (UID: \"542dc8e1-9fec-4080-bd2d-8e51070f73a1\") " pod="openshift-logging/logging-loki-ingester-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.656107 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ingester-http\" (UniqueName: \"kubernetes.io/secret/542dc8e1-9fec-4080-bd2d-8e51070f73a1-logging-loki-ingester-http\") pod \"logging-loki-ingester-0\" (UID: \"542dc8e1-9fec-4080-bd2d-8e51070f73a1\") " pod="openshift-logging/logging-loki-ingester-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.656219 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-08c7e9fc-cf4d-4e2c-9f51-596dfaa79b0c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-08c7e9fc-cf4d-4e2c-9f51-596dfaa79b0c\") pod \"logging-loki-ingester-0\" (UID: \"542dc8e1-9fec-4080-bd2d-8e51070f73a1\") " pod="openshift-logging/logging-loki-ingester-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.656519 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/542dc8e1-9fec-4080-bd2d-8e51070f73a1-logging-loki-ca-bundle\") pod \"logging-loki-ingester-0\" (UID: \"542dc8e1-9fec-4080-bd2d-8e51070f73a1\") " pod="openshift-logging/logging-loki-ingester-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.656653 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/542dc8e1-9fec-4080-bd2d-8e51070f73a1-logging-loki-s3\") pod \"logging-loki-ingester-0\" (UID: \"542dc8e1-9fec-4080-bd2d-8e51070f73a1\") " pod="openshift-logging/logging-loki-ingester-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.656757 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"pvc-0d8102b6-bcdb-49b6-96ed-9c843eb0151b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0d8102b6-bcdb-49b6-96ed-9c843eb0151b\") pod \"logging-loki-ingester-0\" (UID: \"542dc8e1-9fec-4080-bd2d-8e51070f73a1\") " pod="openshift-logging/logging-loki-ingester-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.656871 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ingester-grpc\" (UniqueName: \"kubernetes.io/secret/542dc8e1-9fec-4080-bd2d-8e51070f73a1-logging-loki-ingester-grpc\") pod \"logging-loki-ingester-0\" (UID: \"542dc8e1-9fec-4080-bd2d-8e51070f73a1\") " pod="openshift-logging/logging-loki-ingester-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.656966 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/2d8aae18-ca02-4ba1-8b8f-ca028ccea24e-tls-secret\") pod \"logging-loki-gateway-868b848d6f-mk4cx\" (UID: \"2d8aae18-ca02-4ba1-8b8f-ca028ccea24e\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-mk4cx" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.657063 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/542dc8e1-9fec-4080-bd2d-8e51070f73a1-config\") pod \"logging-loki-ingester-0\" (UID: \"542dc8e1-9fec-4080-bd2d-8e51070f73a1\") " pod="openshift-logging/logging-loki-ingester-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.660825 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/2d8aae18-ca02-4ba1-8b8f-ca028ccea24e-tls-secret\") pod \"logging-loki-gateway-868b848d6f-mk4cx\" (UID: \"2d8aae18-ca02-4ba1-8b8f-ca028ccea24e\") " pod="openshift-logging/logging-loki-gateway-868b848d6f-mk4cx" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.662770 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-querier-5895d59bb8-jbrkm"] Dec 08 21:33:22 crc kubenswrapper[4791]: W1208 21:33:22.671295 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod02e28803_31d3_4093_892d_cb6ae8ca37a0.slice/crio-d0690a7f5f5814d453a7bd839d3e1265351c8f37f38fd7f4871a50f06ad10dab WatchSource:0}: Error finding container d0690a7f5f5814d453a7bd839d3e1265351c8f37f38fd7f4871a50f06ad10dab: Status 404 returned error can't find the container with id d0690a7f5f5814d453a7bd839d3e1265351c8f37f38fd7f4871a50f06ad10dab Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.732239 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/logging-loki-index-gateway-0"] Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.732515 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-gateway-868b848d6f-twnll" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.733312 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/logging-loki-index-gateway-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.737573 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-index-gateway-http" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.738349 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-index-gateway-grpc" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.743091 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-index-gateway-0"] Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.765273 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/3f7c0b21-8653-4e60-9e31-48c491c92f1f-logging-loki-s3\") pod \"logging-loki-compactor-0\" (UID: \"3f7c0b21-8653-4e60-9e31-48c491c92f1f\") " pod="openshift-logging/logging-loki-compactor-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.765320 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zj468\" (UniqueName: \"kubernetes.io/projected/3f7c0b21-8653-4e60-9e31-48c491c92f1f-kube-api-access-zj468\") pod \"logging-loki-compactor-0\" (UID: \"3f7c0b21-8653-4e60-9e31-48c491c92f1f\") " pod="openshift-logging/logging-loki-compactor-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.765352 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j4fsz\" (UniqueName: \"kubernetes.io/projected/542dc8e1-9fec-4080-bd2d-8e51070f73a1-kube-api-access-j4fsz\") pod \"logging-loki-ingester-0\" (UID: \"542dc8e1-9fec-4080-bd2d-8e51070f73a1\") " pod="openshift-logging/logging-loki-ingester-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.765369 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ingester-http\" (UniqueName: \"kubernetes.io/secret/542dc8e1-9fec-4080-bd2d-8e51070f73a1-logging-loki-ingester-http\") pod \"logging-loki-ingester-0\" (UID: \"542dc8e1-9fec-4080-bd2d-8e51070f73a1\") " pod="openshift-logging/logging-loki-ingester-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.765396 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-08c7e9fc-cf4d-4e2c-9f51-596dfaa79b0c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-08c7e9fc-cf4d-4e2c-9f51-596dfaa79b0c\") pod \"logging-loki-ingester-0\" (UID: \"542dc8e1-9fec-4080-bd2d-8e51070f73a1\") " pod="openshift-logging/logging-loki-ingester-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.765416 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3f7c0b21-8653-4e60-9e31-48c491c92f1f-logging-loki-ca-bundle\") pod \"logging-loki-compactor-0\" (UID: \"3f7c0b21-8653-4e60-9e31-48c491c92f1f\") " pod="openshift-logging/logging-loki-compactor-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.765440 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-ba0901d9-81a0-41a6-9da1-99037160f721\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ba0901d9-81a0-41a6-9da1-99037160f721\") pod \"logging-loki-compactor-0\" (UID: \"3f7c0b21-8653-4e60-9e31-48c491c92f1f\") " 
pod="openshift-logging/logging-loki-compactor-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.765469 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-compactor-grpc\" (UniqueName: \"kubernetes.io/secret/3f7c0b21-8653-4e60-9e31-48c491c92f1f-logging-loki-compactor-grpc\") pod \"logging-loki-compactor-0\" (UID: \"3f7c0b21-8653-4e60-9e31-48c491c92f1f\") " pod="openshift-logging/logging-loki-compactor-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.765493 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/542dc8e1-9fec-4080-bd2d-8e51070f73a1-logging-loki-ca-bundle\") pod \"logging-loki-ingester-0\" (UID: \"542dc8e1-9fec-4080-bd2d-8e51070f73a1\") " pod="openshift-logging/logging-loki-ingester-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.765533 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/542dc8e1-9fec-4080-bd2d-8e51070f73a1-logging-loki-s3\") pod \"logging-loki-ingester-0\" (UID: \"542dc8e1-9fec-4080-bd2d-8e51070f73a1\") " pod="openshift-logging/logging-loki-ingester-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.765552 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f7c0b21-8653-4e60-9e31-48c491c92f1f-config\") pod \"logging-loki-compactor-0\" (UID: \"3f7c0b21-8653-4e60-9e31-48c491c92f1f\") " pod="openshift-logging/logging-loki-compactor-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.765575 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-0d8102b6-bcdb-49b6-96ed-9c843eb0151b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0d8102b6-bcdb-49b6-96ed-9c843eb0151b\") pod \"logging-loki-ingester-0\" (UID: \"542dc8e1-9fec-4080-bd2d-8e51070f73a1\") " pod="openshift-logging/logging-loki-ingester-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.765593 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ingester-grpc\" (UniqueName: \"kubernetes.io/secret/542dc8e1-9fec-4080-bd2d-8e51070f73a1-logging-loki-ingester-grpc\") pod \"logging-loki-ingester-0\" (UID: \"542dc8e1-9fec-4080-bd2d-8e51070f73a1\") " pod="openshift-logging/logging-loki-ingester-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.765910 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/542dc8e1-9fec-4080-bd2d-8e51070f73a1-config\") pod \"logging-loki-ingester-0\" (UID: \"542dc8e1-9fec-4080-bd2d-8e51070f73a1\") " pod="openshift-logging/logging-loki-ingester-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.765945 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-compactor-http\" (UniqueName: \"kubernetes.io/secret/3f7c0b21-8653-4e60-9e31-48c491c92f1f-logging-loki-compactor-http\") pod \"logging-loki-compactor-0\" (UID: \"3f7c0b21-8653-4e60-9e31-48c491c92f1f\") " pod="openshift-logging/logging-loki-compactor-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.767795 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/542dc8e1-9fec-4080-bd2d-8e51070f73a1-logging-loki-ca-bundle\") pod \"logging-loki-ingester-0\" (UID: \"542dc8e1-9fec-4080-bd2d-8e51070f73a1\") " pod="openshift-logging/logging-loki-ingester-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.768650 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/542dc8e1-9fec-4080-bd2d-8e51070f73a1-config\") pod \"logging-loki-ingester-0\" (UID: \"542dc8e1-9fec-4080-bd2d-8e51070f73a1\") " pod="openshift-logging/logging-loki-ingester-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.771181 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ingester-http\" (UniqueName: \"kubernetes.io/secret/542dc8e1-9fec-4080-bd2d-8e51070f73a1-logging-loki-ingester-http\") pod \"logging-loki-ingester-0\" (UID: \"542dc8e1-9fec-4080-bd2d-8e51070f73a1\") " pod="openshift-logging/logging-loki-ingester-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.772290 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ingester-grpc\" (UniqueName: \"kubernetes.io/secret/542dc8e1-9fec-4080-bd2d-8e51070f73a1-logging-loki-ingester-grpc\") pod \"logging-loki-ingester-0\" (UID: \"542dc8e1-9fec-4080-bd2d-8e51070f73a1\") " pod="openshift-logging/logging-loki-ingester-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.774465 4791 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.774511 4791 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-0d8102b6-bcdb-49b6-96ed-9c843eb0151b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0d8102b6-bcdb-49b6-96ed-9c843eb0151b\") pod \"logging-loki-ingester-0\" (UID: \"542dc8e1-9fec-4080-bd2d-8e51070f73a1\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/4f16824b91c2d9d86c7e254e2e7a5980c0f7c11dc8c18e0915c938a9d87a6edf/globalmount\"" pod="openshift-logging/logging-loki-ingester-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.774678 4791 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.774724 4791 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-08c7e9fc-cf4d-4e2c-9f51-596dfaa79b0c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-08c7e9fc-cf4d-4e2c-9f51-596dfaa79b0c\") pod \"logging-loki-ingester-0\" (UID: \"542dc8e1-9fec-4080-bd2d-8e51070f73a1\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/5bfca31ad2b789933abbc7c4e112decd3339521b5a9d2c8fc52a768064af3bf7/globalmount\"" pod="openshift-logging/logging-loki-ingester-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.779431 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/542dc8e1-9fec-4080-bd2d-8e51070f73a1-logging-loki-s3\") pod \"logging-loki-ingester-0\" (UID: \"542dc8e1-9fec-4080-bd2d-8e51070f73a1\") " pod="openshift-logging/logging-loki-ingester-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.783994 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j4fsz\" (UniqueName: \"kubernetes.io/projected/542dc8e1-9fec-4080-bd2d-8e51070f73a1-kube-api-access-j4fsz\") pod \"logging-loki-ingester-0\" (UID: \"542dc8e1-9fec-4080-bd2d-8e51070f73a1\") " pod="openshift-logging/logging-loki-ingester-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.813589 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-0d8102b6-bcdb-49b6-96ed-9c843eb0151b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0d8102b6-bcdb-49b6-96ed-9c843eb0151b\") pod \"logging-loki-ingester-0\" (UID: \"542dc8e1-9fec-4080-bd2d-8e51070f73a1\") " pod="openshift-logging/logging-loki-ingester-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.815382 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-08c7e9fc-cf4d-4e2c-9f51-596dfaa79b0c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-08c7e9fc-cf4d-4e2c-9f51-596dfaa79b0c\") pod \"logging-loki-ingester-0\" (UID: \"542dc8e1-9fec-4080-bd2d-8e51070f73a1\") " pod="openshift-logging/logging-loki-ingester-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.825537 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-gateway-868b848d6f-mk4cx" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.834063 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/logging-loki-ingester-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.867504 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/3f7c0b21-8653-4e60-9e31-48c491c92f1f-logging-loki-s3\") pod \"logging-loki-compactor-0\" (UID: \"3f7c0b21-8653-4e60-9e31-48c491c92f1f\") " pod="openshift-logging/logging-loki-compactor-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.867543 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zj468\" (UniqueName: \"kubernetes.io/projected/3f7c0b21-8653-4e60-9e31-48c491c92f1f-kube-api-access-zj468\") pod \"logging-loki-compactor-0\" (UID: \"3f7c0b21-8653-4e60-9e31-48c491c92f1f\") " pod="openshift-logging/logging-loki-compactor-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.867569 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vljcz\" (UniqueName: \"kubernetes.io/projected/2097a8a1-e800-4cf3-89bc-8f540d7e6c3a-kube-api-access-vljcz\") pod \"logging-loki-index-gateway-0\" (UID: \"2097a8a1-e800-4cf3-89bc-8f540d7e6c3a\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.867604 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2097a8a1-e800-4cf3-89bc-8f540d7e6c3a-config\") pod \"logging-loki-index-gateway-0\" (UID: \"2097a8a1-e800-4cf3-89bc-8f540d7e6c3a\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.867634 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f7c0b21-8653-4e60-9e31-48c491c92f1f-config\") pod \"logging-loki-compactor-0\" (UID: \"3f7c0b21-8653-4e60-9e31-48c491c92f1f\") " pod="openshift-logging/logging-loki-compactor-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.867660 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-compactor-http\" (UniqueName: \"kubernetes.io/secret/3f7c0b21-8653-4e60-9e31-48c491c92f1f-logging-loki-compactor-http\") pod \"logging-loki-compactor-0\" (UID: \"3f7c0b21-8653-4e60-9e31-48c491c92f1f\") " pod="openshift-logging/logging-loki-compactor-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.867691 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2097a8a1-e800-4cf3-89bc-8f540d7e6c3a-logging-loki-ca-bundle\") pod \"logging-loki-index-gateway-0\" (UID: \"2097a8a1-e800-4cf3-89bc-8f540d7e6c3a\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.867731 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-index-gateway-grpc\" (UniqueName: \"kubernetes.io/secret/2097a8a1-e800-4cf3-89bc-8f540d7e6c3a-logging-loki-index-gateway-grpc\") pod \"logging-loki-index-gateway-0\" (UID: \"2097a8a1-e800-4cf3-89bc-8f540d7e6c3a\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.867750 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/3f7c0b21-8653-4e60-9e31-48c491c92f1f-logging-loki-ca-bundle\") pod \"logging-loki-compactor-0\" (UID: \"3f7c0b21-8653-4e60-9e31-48c491c92f1f\") " pod="openshift-logging/logging-loki-compactor-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.867771 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-ba0901d9-81a0-41a6-9da1-99037160f721\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ba0901d9-81a0-41a6-9da1-99037160f721\") pod \"logging-loki-compactor-0\" (UID: \"3f7c0b21-8653-4e60-9e31-48c491c92f1f\") " pod="openshift-logging/logging-loki-compactor-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.867794 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-compactor-grpc\" (UniqueName: \"kubernetes.io/secret/3f7c0b21-8653-4e60-9e31-48c491c92f1f-logging-loki-compactor-grpc\") pod \"logging-loki-compactor-0\" (UID: \"3f7c0b21-8653-4e60-9e31-48c491c92f1f\") " pod="openshift-logging/logging-loki-compactor-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.867810 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-index-gateway-http\" (UniqueName: \"kubernetes.io/secret/2097a8a1-e800-4cf3-89bc-8f540d7e6c3a-logging-loki-index-gateway-http\") pod \"logging-loki-index-gateway-0\" (UID: \"2097a8a1-e800-4cf3-89bc-8f540d7e6c3a\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.867851 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c366e088-b2e7-4421-afee-fbf4b255e77a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c366e088-b2e7-4421-afee-fbf4b255e77a\") pod \"logging-loki-index-gateway-0\" (UID: \"2097a8a1-e800-4cf3-89bc-8f540d7e6c3a\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.867873 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/2097a8a1-e800-4cf3-89bc-8f540d7e6c3a-logging-loki-s3\") pod \"logging-loki-index-gateway-0\" (UID: \"2097a8a1-e800-4cf3-89bc-8f540d7e6c3a\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.868941 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f7c0b21-8653-4e60-9e31-48c491c92f1f-config\") pod \"logging-loki-compactor-0\" (UID: \"3f7c0b21-8653-4e60-9e31-48c491c92f1f\") " pod="openshift-logging/logging-loki-compactor-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.868961 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3f7c0b21-8653-4e60-9e31-48c491c92f1f-logging-loki-ca-bundle\") pod \"logging-loki-compactor-0\" (UID: \"3f7c0b21-8653-4e60-9e31-48c491c92f1f\") " pod="openshift-logging/logging-loki-compactor-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.871103 4791 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.871141 4791 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-ba0901d9-81a0-41a6-9da1-99037160f721\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ba0901d9-81a0-41a6-9da1-99037160f721\") pod \"logging-loki-compactor-0\" (UID: \"3f7c0b21-8653-4e60-9e31-48c491c92f1f\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/d654114eb40ae1b4d3526ecc67580a7dfd935ce1cec25d0b2f6941907e30def3/globalmount\"" pod="openshift-logging/logging-loki-compactor-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.872439 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-compactor-grpc\" (UniqueName: \"kubernetes.io/secret/3f7c0b21-8653-4e60-9e31-48c491c92f1f-logging-loki-compactor-grpc\") pod \"logging-loki-compactor-0\" (UID: \"3f7c0b21-8653-4e60-9e31-48c491c92f1f\") " pod="openshift-logging/logging-loki-compactor-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.872914 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/3f7c0b21-8653-4e60-9e31-48c491c92f1f-logging-loki-s3\") pod \"logging-loki-compactor-0\" (UID: \"3f7c0b21-8653-4e60-9e31-48c491c92f1f\") " pod="openshift-logging/logging-loki-compactor-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.885111 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-compactor-http\" (UniqueName: \"kubernetes.io/secret/3f7c0b21-8653-4e60-9e31-48c491c92f1f-logging-loki-compactor-http\") pod \"logging-loki-compactor-0\" (UID: \"3f7c0b21-8653-4e60-9e31-48c491c92f1f\") " pod="openshift-logging/logging-loki-compactor-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.887879 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zj468\" (UniqueName: \"kubernetes.io/projected/3f7c0b21-8653-4e60-9e31-48c491c92f1f-kube-api-access-zj468\") pod \"logging-loki-compactor-0\" (UID: \"3f7c0b21-8653-4e60-9e31-48c491c92f1f\") " pod="openshift-logging/logging-loki-compactor-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.913477 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-ba0901d9-81a0-41a6-9da1-99037160f721\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ba0901d9-81a0-41a6-9da1-99037160f721\") pod \"logging-loki-compactor-0\" (UID: \"3f7c0b21-8653-4e60-9e31-48c491c92f1f\") " pod="openshift-logging/logging-loki-compactor-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.965198 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/logging-loki-compactor-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.969006 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2097a8a1-e800-4cf3-89bc-8f540d7e6c3a-logging-loki-ca-bundle\") pod \"logging-loki-index-gateway-0\" (UID: \"2097a8a1-e800-4cf3-89bc-8f540d7e6c3a\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.969064 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-index-gateway-grpc\" (UniqueName: \"kubernetes.io/secret/2097a8a1-e800-4cf3-89bc-8f540d7e6c3a-logging-loki-index-gateway-grpc\") pod \"logging-loki-index-gateway-0\" (UID: \"2097a8a1-e800-4cf3-89bc-8f540d7e6c3a\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.969100 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-index-gateway-http\" (UniqueName: \"kubernetes.io/secret/2097a8a1-e800-4cf3-89bc-8f540d7e6c3a-logging-loki-index-gateway-http\") pod \"logging-loki-index-gateway-0\" (UID: \"2097a8a1-e800-4cf3-89bc-8f540d7e6c3a\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.969161 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-c366e088-b2e7-4421-afee-fbf4b255e77a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c366e088-b2e7-4421-afee-fbf4b255e77a\") pod \"logging-loki-index-gateway-0\" (UID: \"2097a8a1-e800-4cf3-89bc-8f540d7e6c3a\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.969193 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/2097a8a1-e800-4cf3-89bc-8f540d7e6c3a-logging-loki-s3\") pod \"logging-loki-index-gateway-0\" (UID: \"2097a8a1-e800-4cf3-89bc-8f540d7e6c3a\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.969240 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vljcz\" (UniqueName: \"kubernetes.io/projected/2097a8a1-e800-4cf3-89bc-8f540d7e6c3a-kube-api-access-vljcz\") pod \"logging-loki-index-gateway-0\" (UID: \"2097a8a1-e800-4cf3-89bc-8f540d7e6c3a\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.969284 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2097a8a1-e800-4cf3-89bc-8f540d7e6c3a-config\") pod \"logging-loki-index-gateway-0\" (UID: \"2097a8a1-e800-4cf3-89bc-8f540d7e6c3a\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.971198 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2097a8a1-e800-4cf3-89bc-8f540d7e6c3a-config\") pod \"logging-loki-index-gateway-0\" (UID: \"2097a8a1-e800-4cf3-89bc-8f540d7e6c3a\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.971198 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/2097a8a1-e800-4cf3-89bc-8f540d7e6c3a-logging-loki-ca-bundle\") pod \"logging-loki-index-gateway-0\" (UID: \"2097a8a1-e800-4cf3-89bc-8f540d7e6c3a\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.974499 4791 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.974563 4791 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c366e088-b2e7-4421-afee-fbf4b255e77a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c366e088-b2e7-4421-afee-fbf4b255e77a\") pod \"logging-loki-index-gateway-0\" (UID: \"2097a8a1-e800-4cf3-89bc-8f540d7e6c3a\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/da6303b8a165c39e77bcb60662b6cc909c1754f58031d17231d9d70d1adb31b4/globalmount\"" pod="openshift-logging/logging-loki-index-gateway-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.976073 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/2097a8a1-e800-4cf3-89bc-8f540d7e6c3a-logging-loki-s3\") pod \"logging-loki-index-gateway-0\" (UID: \"2097a8a1-e800-4cf3-89bc-8f540d7e6c3a\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.977579 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-index-gateway-http\" (UniqueName: \"kubernetes.io/secret/2097a8a1-e800-4cf3-89bc-8f540d7e6c3a-logging-loki-index-gateway-http\") pod \"logging-loki-index-gateway-0\" (UID: \"2097a8a1-e800-4cf3-89bc-8f540d7e6c3a\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.978462 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-index-gateway-grpc\" (UniqueName: \"kubernetes.io/secret/2097a8a1-e800-4cf3-89bc-8f540d7e6c3a-logging-loki-index-gateway-grpc\") pod \"logging-loki-index-gateway-0\" (UID: \"2097a8a1-e800-4cf3-89bc-8f540d7e6c3a\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 08 21:33:22 crc kubenswrapper[4791]: I1208 21:33:22.990559 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vljcz\" (UniqueName: \"kubernetes.io/projected/2097a8a1-e800-4cf3-89bc-8f540d7e6c3a-kube-api-access-vljcz\") pod \"logging-loki-index-gateway-0\" (UID: \"2097a8a1-e800-4cf3-89bc-8f540d7e6c3a\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 08 21:33:23 crc kubenswrapper[4791]: I1208 21:33:23.007040 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c366e088-b2e7-4421-afee-fbf4b255e77a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c366e088-b2e7-4421-afee-fbf4b255e77a\") pod \"logging-loki-index-gateway-0\" (UID: \"2097a8a1-e800-4cf3-89bc-8f540d7e6c3a\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 08 21:33:23 crc kubenswrapper[4791]: I1208 21:33:23.099934 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/logging-loki-index-gateway-0" Dec 08 21:33:23 crc kubenswrapper[4791]: I1208 21:33:23.174389 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-gateway-868b848d6f-twnll"] Dec 08 21:33:23 crc kubenswrapper[4791]: I1208 21:33:23.274237 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-gateway-868b848d6f-mk4cx"] Dec 08 21:33:23 crc kubenswrapper[4791]: W1208 21:33:23.323677 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2d8aae18_ca02_4ba1_8b8f_ca028ccea24e.slice/crio-e3be68ed713b2972380c1f8a5037cc29ebf2ae8faaee4d9a63a7ecf7001e7479 WatchSource:0}: Error finding container e3be68ed713b2972380c1f8a5037cc29ebf2ae8faaee4d9a63a7ecf7001e7479: Status 404 returned error can't find the container with id e3be68ed713b2972380c1f8a5037cc29ebf2ae8faaee4d9a63a7ecf7001e7479 Dec 08 21:33:23 crc kubenswrapper[4791]: I1208 21:33:23.363902 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-ingester-0"] Dec 08 21:33:23 crc kubenswrapper[4791]: I1208 21:33:23.376478 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-index-gateway-0"] Dec 08 21:33:23 crc kubenswrapper[4791]: W1208 21:33:23.380897 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2097a8a1_e800_4cf3_89bc_8f540d7e6c3a.slice/crio-65b245d9c108b9c568f71f531ad75685f4f5f087eaad965730f3f99ab9b25b0a WatchSource:0}: Error finding container 65b245d9c108b9c568f71f531ad75685f4f5f087eaad965730f3f99ab9b25b0a: Status 404 returned error can't find the container with id 65b245d9c108b9c568f71f531ad75685f4f5f087eaad965730f3f99ab9b25b0a Dec 08 21:33:23 crc kubenswrapper[4791]: W1208 21:33:23.386447 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod542dc8e1_9fec_4080_bd2d_8e51070f73a1.slice/crio-31e623365300b74f7a6ce266a5f65ca1c0afaf3b7931c6cf4f8ec234b60c4dc8 WatchSource:0}: Error finding container 31e623365300b74f7a6ce266a5f65ca1c0afaf3b7931c6cf4f8ec234b60c4dc8: Status 404 returned error can't find the container with id 31e623365300b74f7a6ce266a5f65ca1c0afaf3b7931c6cf4f8ec234b60c4dc8 Dec 08 21:33:23 crc kubenswrapper[4791]: I1208 21:33:23.412194 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-compactor-0"] Dec 08 21:33:23 crc kubenswrapper[4791]: W1208 21:33:23.417306 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3f7c0b21_8653_4e60_9e31_48c491c92f1f.slice/crio-d2e8ad256087275642dbd7dc7674607f0e6edcf48873a40a3c6a9d4a3692101b WatchSource:0}: Error finding container d2e8ad256087275642dbd7dc7674607f0e6edcf48873a40a3c6a9d4a3692101b: Status 404 returned error can't find the container with id d2e8ad256087275642dbd7dc7674607f0e6edcf48873a40a3c6a9d4a3692101b Dec 08 21:33:23 crc kubenswrapper[4791]: I1208 21:33:23.628278 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-gateway-868b848d6f-twnll" event={"ID":"4d4e77a7-b71b-466f-8964-b1c4257c7c79","Type":"ContainerStarted","Data":"bfa50c8b0d1becabb93b9ead1e3f799ea70f1367e887a4a44f1ac4510cfab365"} Dec 08 21:33:23 crc kubenswrapper[4791]: I1208 21:33:23.631241 4791 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openshift-logging/logging-loki-gateway-868b848d6f-mk4cx" event={"ID":"2d8aae18-ca02-4ba1-8b8f-ca028ccea24e","Type":"ContainerStarted","Data":"e3be68ed713b2972380c1f8a5037cc29ebf2ae8faaee4d9a63a7ecf7001e7479"} Dec 08 21:33:23 crc kubenswrapper[4791]: I1208 21:33:23.632271 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-querier-5895d59bb8-jbrkm" event={"ID":"02e28803-31d3-4093-892d-cb6ae8ca37a0","Type":"ContainerStarted","Data":"d0690a7f5f5814d453a7bd839d3e1265351c8f37f38fd7f4871a50f06ad10dab"} Dec 08 21:33:23 crc kubenswrapper[4791]: I1208 21:33:23.633636 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-ingester-0" event={"ID":"542dc8e1-9fec-4080-bd2d-8e51070f73a1","Type":"ContainerStarted","Data":"31e623365300b74f7a6ce266a5f65ca1c0afaf3b7931c6cf4f8ec234b60c4dc8"} Dec 08 21:33:23 crc kubenswrapper[4791]: I1208 21:33:23.635276 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-index-gateway-0" event={"ID":"2097a8a1-e800-4cf3-89bc-8f540d7e6c3a","Type":"ContainerStarted","Data":"65b245d9c108b9c568f71f531ad75685f4f5f087eaad965730f3f99ab9b25b0a"} Dec 08 21:33:23 crc kubenswrapper[4791]: I1208 21:33:23.636323 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-compactor-0" event={"ID":"3f7c0b21-8653-4e60-9e31-48c491c92f1f","Type":"ContainerStarted","Data":"d2e8ad256087275642dbd7dc7674607f0e6edcf48873a40a3c6a9d4a3692101b"} Dec 08 21:33:28 crc kubenswrapper[4791]: I1208 21:33:28.675567 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-distributor-76cc67bf56-nbw68" event={"ID":"ca262116-be5e-42bb-b68e-5d96c476628a","Type":"ContainerStarted","Data":"91aabaf6c01ee67ada70d9c8a9b80cd974dbac15b7de801dd09c1da9526d9be0"} Dec 08 21:33:28 crc kubenswrapper[4791]: I1208 21:33:28.675950 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-distributor-76cc67bf56-nbw68" Dec 08 21:33:28 crc kubenswrapper[4791]: I1208 21:33:28.679276 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-compactor-0" event={"ID":"3f7c0b21-8653-4e60-9e31-48c491c92f1f","Type":"ContainerStarted","Data":"1d245b0c3b79557ec8ac1a48761a6b38a19cb7cb6f28b31a8a4fa0d2df1ca846"} Dec 08 21:33:28 crc kubenswrapper[4791]: I1208 21:33:28.681455 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-gateway-868b848d6f-twnll" event={"ID":"4d4e77a7-b71b-466f-8964-b1c4257c7c79","Type":"ContainerStarted","Data":"83636a3fb107dfc5e0dc2f8aafb2a344449caca6396020398a30f2a7513b0e81"} Dec 08 21:33:28 crc kubenswrapper[4791]: I1208 21:33:28.694117 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-gateway-868b848d6f-mk4cx" event={"ID":"2d8aae18-ca02-4ba1-8b8f-ca028ccea24e","Type":"ContainerStarted","Data":"e38520cce2065fda5cb7ce1f4a7db131f5a39c6d5764b5e2267bab87f312a8c7"} Dec 08 21:33:28 crc kubenswrapper[4791]: I1208 21:33:28.697406 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-querier-5895d59bb8-jbrkm" event={"ID":"02e28803-31d3-4093-892d-cb6ae8ca37a0","Type":"ContainerStarted","Data":"fa57380e41d3d2fe0bb675a9d4515efc98862e5a932307e97780ea70b98d207c"} Dec 08 21:33:28 crc kubenswrapper[4791]: I1208 21:33:28.697545 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-logging/logging-loki-querier-5895d59bb8-jbrkm" Dec 08 21:33:28 crc kubenswrapper[4791]: I1208 21:33:28.700623 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-index-gateway-0" event={"ID":"2097a8a1-e800-4cf3-89bc-8f540d7e6c3a","Type":"ContainerStarted","Data":"e95018d9367d380383eaf4b6ce01c0a47bbafceccc6d9ea82916dd68de21b423"} Dec 08 21:33:28 crc kubenswrapper[4791]: I1208 21:33:28.700967 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-index-gateway-0" Dec 08 21:33:28 crc kubenswrapper[4791]: I1208 21:33:28.702400 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-qrzd8" event={"ID":"544545f2-67f8-4bb2-8287-644c13874f93","Type":"ContainerStarted","Data":"aa24fd57d58d817fa026923c8973b1331e258439433799d5dd76d9ebb1cabe7b"} Dec 08 21:33:28 crc kubenswrapper[4791]: I1208 21:33:28.702609 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-qrzd8" Dec 08 21:33:28 crc kubenswrapper[4791]: I1208 21:33:28.710016 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-distributor-76cc67bf56-nbw68" podStartSLOduration=2.45732142 podStartE2EDuration="7.709989986s" podCreationTimestamp="2025-12-08 21:33:21 +0000 UTC" firstStartedPulling="2025-12-08 21:33:22.394247084 +0000 UTC m=+879.093005419" lastFinishedPulling="2025-12-08 21:33:27.64691563 +0000 UTC m=+884.345673985" observedRunningTime="2025-12-08 21:33:28.704385759 +0000 UTC m=+885.403144104" watchObservedRunningTime="2025-12-08 21:33:28.709989986 +0000 UTC m=+885.408748341" Dec 08 21:33:28 crc kubenswrapper[4791]: I1208 21:33:28.728454 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-qrzd8" podStartSLOduration=2.4260552730000002 podStartE2EDuration="7.728433039s" podCreationTimestamp="2025-12-08 21:33:21 +0000 UTC" firstStartedPulling="2025-12-08 21:33:22.310413286 +0000 UTC m=+879.009171631" lastFinishedPulling="2025-12-08 21:33:27.612791052 +0000 UTC m=+884.311549397" observedRunningTime="2025-12-08 21:33:28.725802825 +0000 UTC m=+885.424561170" watchObservedRunningTime="2025-12-08 21:33:28.728433039 +0000 UTC m=+885.427191384" Dec 08 21:33:28 crc kubenswrapper[4791]: I1208 21:33:28.751405 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-index-gateway-0" podStartSLOduration=3.244688219 podStartE2EDuration="7.751384103s" podCreationTimestamp="2025-12-08 21:33:21 +0000 UTC" firstStartedPulling="2025-12-08 21:33:23.384653137 +0000 UTC m=+880.083411492" lastFinishedPulling="2025-12-08 21:33:27.891349031 +0000 UTC m=+884.590107376" observedRunningTime="2025-12-08 21:33:28.750802368 +0000 UTC m=+885.449560713" watchObservedRunningTime="2025-12-08 21:33:28.751384103 +0000 UTC m=+885.450142448" Dec 08 21:33:28 crc kubenswrapper[4791]: I1208 21:33:28.772041 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-querier-5895d59bb8-jbrkm" podStartSLOduration=2.792638662 podStartE2EDuration="7.772018129s" podCreationTimestamp="2025-12-08 21:33:21 +0000 UTC" firstStartedPulling="2025-12-08 21:33:22.673234213 +0000 UTC m=+879.371992558" lastFinishedPulling="2025-12-08 21:33:27.65261368 +0000 UTC m=+884.351372025" observedRunningTime="2025-12-08 
21:33:28.767211121 +0000 UTC m=+885.465969466" watchObservedRunningTime="2025-12-08 21:33:28.772018129 +0000 UTC m=+885.470776474" Dec 08 21:33:29 crc kubenswrapper[4791]: I1208 21:33:29.712428 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-ingester-0" event={"ID":"542dc8e1-9fec-4080-bd2d-8e51070f73a1","Type":"ContainerStarted","Data":"b9e0bebf0bd88c79864a5f6e6103354dc7550d08073c93f777cbd19a8973f638"} Dec 08 21:33:29 crc kubenswrapper[4791]: I1208 21:33:29.736007 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-compactor-0" podStartSLOduration=4.542528649 podStartE2EDuration="8.735984473s" podCreationTimestamp="2025-12-08 21:33:21 +0000 UTC" firstStartedPulling="2025-12-08 21:33:23.419554864 +0000 UTC m=+880.118313209" lastFinishedPulling="2025-12-08 21:33:27.613010688 +0000 UTC m=+884.311769033" observedRunningTime="2025-12-08 21:33:29.72974671 +0000 UTC m=+886.428505065" watchObservedRunningTime="2025-12-08 21:33:29.735984473 +0000 UTC m=+886.434742818" Dec 08 21:33:29 crc kubenswrapper[4791]: I1208 21:33:29.754083 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-ingester-0" podStartSLOduration=3.419730877 podStartE2EDuration="8.754064427s" podCreationTimestamp="2025-12-08 21:33:21 +0000 UTC" firstStartedPulling="2025-12-08 21:33:23.388595144 +0000 UTC m=+880.087353479" lastFinishedPulling="2025-12-08 21:33:28.722928684 +0000 UTC m=+885.421687029" observedRunningTime="2025-12-08 21:33:29.749500355 +0000 UTC m=+886.448258700" watchObservedRunningTime="2025-12-08 21:33:29.754064427 +0000 UTC m=+886.452822772" Dec 08 21:33:30 crc kubenswrapper[4791]: I1208 21:33:30.719395 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-ingester-0" Dec 08 21:33:31 crc kubenswrapper[4791]: I1208 21:33:31.728466 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-gateway-868b848d6f-twnll" event={"ID":"4d4e77a7-b71b-466f-8964-b1c4257c7c79","Type":"ContainerStarted","Data":"b18fd5e3db3aba4283891f15b50f76c5358ffe905feff48a0351661e34691ba5"} Dec 08 21:33:31 crc kubenswrapper[4791]: I1208 21:33:31.728744 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-gateway-868b848d6f-twnll" Dec 08 21:33:31 crc kubenswrapper[4791]: I1208 21:33:31.732184 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-gateway-868b848d6f-mk4cx" event={"ID":"2d8aae18-ca02-4ba1-8b8f-ca028ccea24e","Type":"ContainerStarted","Data":"6339e8707b3456b5574ebc97194f8b053306ee7650f3f1143b45903931b54072"} Dec 08 21:33:31 crc kubenswrapper[4791]: I1208 21:33:31.738457 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-gateway-868b848d6f-twnll" Dec 08 21:33:31 crc kubenswrapper[4791]: I1208 21:33:31.773108 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-gateway-868b848d6f-mk4cx" podStartSLOduration=2.6970447159999997 podStartE2EDuration="10.773089171s" podCreationTimestamp="2025-12-08 21:33:21 +0000 UTC" firstStartedPulling="2025-12-08 21:33:23.335615614 +0000 UTC m=+880.034373959" lastFinishedPulling="2025-12-08 21:33:31.411660069 +0000 UTC m=+888.110418414" observedRunningTime="2025-12-08 21:33:31.771246636 +0000 UTC m=+888.470004981" watchObservedRunningTime="2025-12-08 21:33:31.773089171 
+0000 UTC m=+888.471847516" Dec 08 21:33:31 crc kubenswrapper[4791]: I1208 21:33:31.773583 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-gateway-868b848d6f-twnll" podStartSLOduration=2.541197949 podStartE2EDuration="10.773579593s" podCreationTimestamp="2025-12-08 21:33:21 +0000 UTC" firstStartedPulling="2025-12-08 21:33:23.178417054 +0000 UTC m=+879.877175399" lastFinishedPulling="2025-12-08 21:33:31.410798698 +0000 UTC m=+888.109557043" observedRunningTime="2025-12-08 21:33:31.752365952 +0000 UTC m=+888.451124317" watchObservedRunningTime="2025-12-08 21:33:31.773579593 +0000 UTC m=+888.472337938" Dec 08 21:33:32 crc kubenswrapper[4791]: I1208 21:33:32.733989 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-gateway-868b848d6f-twnll" Dec 08 21:33:32 crc kubenswrapper[4791]: I1208 21:33:32.741457 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-gateway-868b848d6f-mk4cx" Dec 08 21:33:32 crc kubenswrapper[4791]: I1208 21:33:32.741492 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-gateway-868b848d6f-mk4cx" Dec 08 21:33:32 crc kubenswrapper[4791]: I1208 21:33:32.751134 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-gateway-868b848d6f-mk4cx" Dec 08 21:33:32 crc kubenswrapper[4791]: I1208 21:33:32.751534 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-gateway-868b848d6f-mk4cx" Dec 08 21:33:32 crc kubenswrapper[4791]: I1208 21:33:32.751972 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-gateway-868b848d6f-twnll" Dec 08 21:33:32 crc kubenswrapper[4791]: I1208 21:33:32.966009 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-compactor-0" Dec 08 21:33:43 crc kubenswrapper[4791]: I1208 21:33:43.107892 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-index-gateway-0" Dec 08 21:33:51 crc kubenswrapper[4791]: I1208 21:33:51.691307 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-distributor-76cc67bf56-nbw68" Dec 08 21:33:51 crc kubenswrapper[4791]: I1208 21:33:51.993495 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-qrzd8" Dec 08 21:33:52 crc kubenswrapper[4791]: I1208 21:33:52.162343 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-querier-5895d59bb8-jbrkm" Dec 08 21:33:52 crc kubenswrapper[4791]: I1208 21:33:52.845015 4791 patch_prober.go:28] interesting pod/logging-loki-ingester-0 container/loki-ingester namespace/openshift-logging: Readiness probe status=failure output="HTTP probe failed with statuscode: 503" start-of-body=Ingester not ready: this instance owns no tokens Dec 08 21:33:52 crc kubenswrapper[4791]: I1208 21:33:52.845080 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-logging/logging-loki-ingester-0" podUID="542dc8e1-9fec-4080-bd2d-8e51070f73a1" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 08 21:33:52 crc kubenswrapper[4791]: I1208 21:33:52.972283 4791 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-compactor-0" Dec 08 21:34:02 crc kubenswrapper[4791]: I1208 21:34:02.838766 4791 patch_prober.go:28] interesting pod/logging-loki-ingester-0 container/loki-ingester namespace/openshift-logging: Readiness probe status=failure output="HTTP probe failed with statuscode: 503" start-of-body=Ingester not ready: waiting for 15s after being ready Dec 08 21:34:02 crc kubenswrapper[4791]: I1208 21:34:02.839351 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-logging/logging-loki-ingester-0" podUID="542dc8e1-9fec-4080-bd2d-8e51070f73a1" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 08 21:34:12 crc kubenswrapper[4791]: I1208 21:34:12.840091 4791 patch_prober.go:28] interesting pod/logging-loki-ingester-0 container/loki-ingester namespace/openshift-logging: Readiness probe status=failure output="HTTP probe failed with statuscode: 503" start-of-body=Ingester not ready: waiting for 15s after being ready Dec 08 21:34:12 crc kubenswrapper[4791]: I1208 21:34:12.840645 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-logging/logging-loki-ingester-0" podUID="542dc8e1-9fec-4080-bd2d-8e51070f73a1" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 08 21:34:22 crc kubenswrapper[4791]: I1208 21:34:22.838435 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-ingester-0" Dec 08 21:34:35 crc kubenswrapper[4791]: I1208 21:34:35.251413 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:34:35 crc kubenswrapper[4791]: I1208 21:34:35.251971 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:34:41 crc kubenswrapper[4791]: I1208 21:34:41.904996 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/collector-w964c"] Dec 08 21:34:41 crc kubenswrapper[4791]: I1208 21:34:41.908870 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/collector-w964c" Dec 08 21:34:41 crc kubenswrapper[4791]: I1208 21:34:41.912640 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"collector-config" Dec 08 21:34:41 crc kubenswrapper[4791]: I1208 21:34:41.912995 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-dockercfg-45xll" Dec 08 21:34:41 crc kubenswrapper[4791]: I1208 21:34:41.913456 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-syslog-receiver" Dec 08 21:34:41 crc kubenswrapper[4791]: I1208 21:34:41.920228 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-token" Dec 08 21:34:41 crc kubenswrapper[4791]: I1208 21:34:41.920400 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-metrics" Dec 08 21:34:41 crc kubenswrapper[4791]: I1208 21:34:41.924679 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"collector-trustbundle" Dec 08 21:34:41 crc kubenswrapper[4791]: I1208 21:34:41.935308 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/collector-w964c"] Dec 08 21:34:41 crc kubenswrapper[4791]: I1208 21:34:41.984797 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-logging/collector-w964c"] Dec 08 21:34:41 crc kubenswrapper[4791]: E1208 21:34:41.985403 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[collector-syslog-receiver collector-token config config-openshift-service-cacrt datadir entrypoint kube-api-access-ldx7b metrics sa-token tmp trusted-ca], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openshift-logging/collector-w964c" podUID="6c881248-37be-4aac-8a99-5cdfef3b6d85" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.065778 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/6c881248-37be-4aac-8a99-5cdfef3b6d85-config-openshift-service-cacrt\") pod \"collector-w964c\" (UID: \"6c881248-37be-4aac-8a99-5cdfef3b6d85\") " pod="openshift-logging/collector-w964c" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.065826 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/6c881248-37be-4aac-8a99-5cdfef3b6d85-metrics\") pod \"collector-w964c\" (UID: \"6c881248-37be-4aac-8a99-5cdfef3b6d85\") " pod="openshift-logging/collector-w964c" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.065853 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/6c881248-37be-4aac-8a99-5cdfef3b6d85-datadir\") pod \"collector-w964c\" (UID: \"6c881248-37be-4aac-8a99-5cdfef3b6d85\") " pod="openshift-logging/collector-w964c" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.065874 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c881248-37be-4aac-8a99-5cdfef3b6d85-config\") pod \"collector-w964c\" (UID: \"6c881248-37be-4aac-8a99-5cdfef3b6d85\") " pod="openshift-logging/collector-w964c" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.065890 4791 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/6c881248-37be-4aac-8a99-5cdfef3b6d85-entrypoint\") pod \"collector-w964c\" (UID: \"6c881248-37be-4aac-8a99-5cdfef3b6d85\") " pod="openshift-logging/collector-w964c" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.065919 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6c881248-37be-4aac-8a99-5cdfef3b6d85-trusted-ca\") pod \"collector-w964c\" (UID: \"6c881248-37be-4aac-8a99-5cdfef3b6d85\") " pod="openshift-logging/collector-w964c" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.065962 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ldx7b\" (UniqueName: \"kubernetes.io/projected/6c881248-37be-4aac-8a99-5cdfef3b6d85-kube-api-access-ldx7b\") pod \"collector-w964c\" (UID: \"6c881248-37be-4aac-8a99-5cdfef3b6d85\") " pod="openshift-logging/collector-w964c" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.065988 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/6c881248-37be-4aac-8a99-5cdfef3b6d85-sa-token\") pod \"collector-w964c\" (UID: \"6c881248-37be-4aac-8a99-5cdfef3b6d85\") " pod="openshift-logging/collector-w964c" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.066039 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/6c881248-37be-4aac-8a99-5cdfef3b6d85-collector-token\") pod \"collector-w964c\" (UID: \"6c881248-37be-4aac-8a99-5cdfef3b6d85\") " pod="openshift-logging/collector-w964c" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.066065 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/6c881248-37be-4aac-8a99-5cdfef3b6d85-tmp\") pod \"collector-w964c\" (UID: \"6c881248-37be-4aac-8a99-5cdfef3b6d85\") " pod="openshift-logging/collector-w964c" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.066082 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/6c881248-37be-4aac-8a99-5cdfef3b6d85-collector-syslog-receiver\") pod \"collector-w964c\" (UID: \"6c881248-37be-4aac-8a99-5cdfef3b6d85\") " pod="openshift-logging/collector-w964c" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.167849 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/6c881248-37be-4aac-8a99-5cdfef3b6d85-tmp\") pod \"collector-w964c\" (UID: \"6c881248-37be-4aac-8a99-5cdfef3b6d85\") " pod="openshift-logging/collector-w964c" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.167900 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/6c881248-37be-4aac-8a99-5cdfef3b6d85-collector-syslog-receiver\") pod \"collector-w964c\" (UID: \"6c881248-37be-4aac-8a99-5cdfef3b6d85\") " pod="openshift-logging/collector-w964c" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.167943 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-openshift-service-cacrt\" (UniqueName: 
\"kubernetes.io/configmap/6c881248-37be-4aac-8a99-5cdfef3b6d85-config-openshift-service-cacrt\") pod \"collector-w964c\" (UID: \"6c881248-37be-4aac-8a99-5cdfef3b6d85\") " pod="openshift-logging/collector-w964c" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.167969 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/6c881248-37be-4aac-8a99-5cdfef3b6d85-metrics\") pod \"collector-w964c\" (UID: \"6c881248-37be-4aac-8a99-5cdfef3b6d85\") " pod="openshift-logging/collector-w964c" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.167999 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/6c881248-37be-4aac-8a99-5cdfef3b6d85-datadir\") pod \"collector-w964c\" (UID: \"6c881248-37be-4aac-8a99-5cdfef3b6d85\") " pod="openshift-logging/collector-w964c" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.168023 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c881248-37be-4aac-8a99-5cdfef3b6d85-config\") pod \"collector-w964c\" (UID: \"6c881248-37be-4aac-8a99-5cdfef3b6d85\") " pod="openshift-logging/collector-w964c" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.168046 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/6c881248-37be-4aac-8a99-5cdfef3b6d85-entrypoint\") pod \"collector-w964c\" (UID: \"6c881248-37be-4aac-8a99-5cdfef3b6d85\") " pod="openshift-logging/collector-w964c" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.168067 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6c881248-37be-4aac-8a99-5cdfef3b6d85-trusted-ca\") pod \"collector-w964c\" (UID: \"6c881248-37be-4aac-8a99-5cdfef3b6d85\") " pod="openshift-logging/collector-w964c" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.168122 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ldx7b\" (UniqueName: \"kubernetes.io/projected/6c881248-37be-4aac-8a99-5cdfef3b6d85-kube-api-access-ldx7b\") pod \"collector-w964c\" (UID: \"6c881248-37be-4aac-8a99-5cdfef3b6d85\") " pod="openshift-logging/collector-w964c" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.168151 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/6c881248-37be-4aac-8a99-5cdfef3b6d85-sa-token\") pod \"collector-w964c\" (UID: \"6c881248-37be-4aac-8a99-5cdfef3b6d85\") " pod="openshift-logging/collector-w964c" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.168222 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/6c881248-37be-4aac-8a99-5cdfef3b6d85-collector-token\") pod \"collector-w964c\" (UID: \"6c881248-37be-4aac-8a99-5cdfef3b6d85\") " pod="openshift-logging/collector-w964c" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.169273 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/6c881248-37be-4aac-8a99-5cdfef3b6d85-datadir\") pod \"collector-w964c\" (UID: \"6c881248-37be-4aac-8a99-5cdfef3b6d85\") " pod="openshift-logging/collector-w964c" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.169598 4791 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/6c881248-37be-4aac-8a99-5cdfef3b6d85-config-openshift-service-cacrt\") pod \"collector-w964c\" (UID: \"6c881248-37be-4aac-8a99-5cdfef3b6d85\") " pod="openshift-logging/collector-w964c" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.170139 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c881248-37be-4aac-8a99-5cdfef3b6d85-config\") pod \"collector-w964c\" (UID: \"6c881248-37be-4aac-8a99-5cdfef3b6d85\") " pod="openshift-logging/collector-w964c" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.170158 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/6c881248-37be-4aac-8a99-5cdfef3b6d85-entrypoint\") pod \"collector-w964c\" (UID: \"6c881248-37be-4aac-8a99-5cdfef3b6d85\") " pod="openshift-logging/collector-w964c" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.170980 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6c881248-37be-4aac-8a99-5cdfef3b6d85-trusted-ca\") pod \"collector-w964c\" (UID: \"6c881248-37be-4aac-8a99-5cdfef3b6d85\") " pod="openshift-logging/collector-w964c" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.174287 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/6c881248-37be-4aac-8a99-5cdfef3b6d85-metrics\") pod \"collector-w964c\" (UID: \"6c881248-37be-4aac-8a99-5cdfef3b6d85\") " pod="openshift-logging/collector-w964c" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.174634 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/6c881248-37be-4aac-8a99-5cdfef3b6d85-collector-syslog-receiver\") pod \"collector-w964c\" (UID: \"6c881248-37be-4aac-8a99-5cdfef3b6d85\") " pod="openshift-logging/collector-w964c" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.175351 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/6c881248-37be-4aac-8a99-5cdfef3b6d85-collector-token\") pod \"collector-w964c\" (UID: \"6c881248-37be-4aac-8a99-5cdfef3b6d85\") " pod="openshift-logging/collector-w964c" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.185296 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/6c881248-37be-4aac-8a99-5cdfef3b6d85-tmp\") pod \"collector-w964c\" (UID: \"6c881248-37be-4aac-8a99-5cdfef3b6d85\") " pod="openshift-logging/collector-w964c" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.185441 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ldx7b\" (UniqueName: \"kubernetes.io/projected/6c881248-37be-4aac-8a99-5cdfef3b6d85-kube-api-access-ldx7b\") pod \"collector-w964c\" (UID: \"6c881248-37be-4aac-8a99-5cdfef3b6d85\") " pod="openshift-logging/collector-w964c" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.186214 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/6c881248-37be-4aac-8a99-5cdfef3b6d85-sa-token\") pod \"collector-w964c\" (UID: \"6c881248-37be-4aac-8a99-5cdfef3b6d85\") " pod="openshift-logging/collector-w964c" Dec 08 
21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.239199 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/collector-w964c" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.249696 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/collector-w964c" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.371799 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/6c881248-37be-4aac-8a99-5cdfef3b6d85-datadir\") pod \"6c881248-37be-4aac-8a99-5cdfef3b6d85\" (UID: \"6c881248-37be-4aac-8a99-5cdfef3b6d85\") " Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.371913 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6c881248-37be-4aac-8a99-5cdfef3b6d85-datadir" (OuterVolumeSpecName: "datadir") pod "6c881248-37be-4aac-8a99-5cdfef3b6d85" (UID: "6c881248-37be-4aac-8a99-5cdfef3b6d85"). InnerVolumeSpecName "datadir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.372148 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/6c881248-37be-4aac-8a99-5cdfef3b6d85-config-openshift-service-cacrt\") pod \"6c881248-37be-4aac-8a99-5cdfef3b6d85\" (UID: \"6c881248-37be-4aac-8a99-5cdfef3b6d85\") " Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.372812 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6c881248-37be-4aac-8a99-5cdfef3b6d85-trusted-ca\") pod \"6c881248-37be-4aac-8a99-5cdfef3b6d85\" (UID: \"6c881248-37be-4aac-8a99-5cdfef3b6d85\") " Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.372884 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/6c881248-37be-4aac-8a99-5cdfef3b6d85-sa-token\") pod \"6c881248-37be-4aac-8a99-5cdfef3b6d85\" (UID: \"6c881248-37be-4aac-8a99-5cdfef3b6d85\") " Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.372907 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6c881248-37be-4aac-8a99-5cdfef3b6d85-config-openshift-service-cacrt" (OuterVolumeSpecName: "config-openshift-service-cacrt") pod "6c881248-37be-4aac-8a99-5cdfef3b6d85" (UID: "6c881248-37be-4aac-8a99-5cdfef3b6d85"). InnerVolumeSpecName "config-openshift-service-cacrt". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.372968 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/6c881248-37be-4aac-8a99-5cdfef3b6d85-entrypoint\") pod \"6c881248-37be-4aac-8a99-5cdfef3b6d85\" (UID: \"6c881248-37be-4aac-8a99-5cdfef3b6d85\") " Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.373007 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/6c881248-37be-4aac-8a99-5cdfef3b6d85-tmp\") pod \"6c881248-37be-4aac-8a99-5cdfef3b6d85\" (UID: \"6c881248-37be-4aac-8a99-5cdfef3b6d85\") " Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.373028 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/6c881248-37be-4aac-8a99-5cdfef3b6d85-collector-token\") pod \"6c881248-37be-4aac-8a99-5cdfef3b6d85\" (UID: \"6c881248-37be-4aac-8a99-5cdfef3b6d85\") " Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.373052 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c881248-37be-4aac-8a99-5cdfef3b6d85-config\") pod \"6c881248-37be-4aac-8a99-5cdfef3b6d85\" (UID: \"6c881248-37be-4aac-8a99-5cdfef3b6d85\") " Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.373077 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/6c881248-37be-4aac-8a99-5cdfef3b6d85-collector-syslog-receiver\") pod \"6c881248-37be-4aac-8a99-5cdfef3b6d85\" (UID: \"6c881248-37be-4aac-8a99-5cdfef3b6d85\") " Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.373103 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ldx7b\" (UniqueName: \"kubernetes.io/projected/6c881248-37be-4aac-8a99-5cdfef3b6d85-kube-api-access-ldx7b\") pod \"6c881248-37be-4aac-8a99-5cdfef3b6d85\" (UID: \"6c881248-37be-4aac-8a99-5cdfef3b6d85\") " Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.373139 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/6c881248-37be-4aac-8a99-5cdfef3b6d85-metrics\") pod \"6c881248-37be-4aac-8a99-5cdfef3b6d85\" (UID: \"6c881248-37be-4aac-8a99-5cdfef3b6d85\") " Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.373411 4791 reconciler_common.go:293] "Volume detached for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/6c881248-37be-4aac-8a99-5cdfef3b6d85-datadir\") on node \"crc\" DevicePath \"\"" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.373429 4791 reconciler_common.go:293] "Volume detached for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/6c881248-37be-4aac-8a99-5cdfef3b6d85-config-openshift-service-cacrt\") on node \"crc\" DevicePath \"\"" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.373672 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6c881248-37be-4aac-8a99-5cdfef3b6d85-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "6c881248-37be-4aac-8a99-5cdfef3b6d85" (UID: "6c881248-37be-4aac-8a99-5cdfef3b6d85"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.374791 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6c881248-37be-4aac-8a99-5cdfef3b6d85-config" (OuterVolumeSpecName: "config") pod "6c881248-37be-4aac-8a99-5cdfef3b6d85" (UID: "6c881248-37be-4aac-8a99-5cdfef3b6d85"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.374830 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6c881248-37be-4aac-8a99-5cdfef3b6d85-entrypoint" (OuterVolumeSpecName: "entrypoint") pod "6c881248-37be-4aac-8a99-5cdfef3b6d85" (UID: "6c881248-37be-4aac-8a99-5cdfef3b6d85"). InnerVolumeSpecName "entrypoint". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.376275 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c881248-37be-4aac-8a99-5cdfef3b6d85-tmp" (OuterVolumeSpecName: "tmp") pod "6c881248-37be-4aac-8a99-5cdfef3b6d85" (UID: "6c881248-37be-4aac-8a99-5cdfef3b6d85"). InnerVolumeSpecName "tmp". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.376539 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c881248-37be-4aac-8a99-5cdfef3b6d85-sa-token" (OuterVolumeSpecName: "sa-token") pod "6c881248-37be-4aac-8a99-5cdfef3b6d85" (UID: "6c881248-37be-4aac-8a99-5cdfef3b6d85"). InnerVolumeSpecName "sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.376582 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c881248-37be-4aac-8a99-5cdfef3b6d85-collector-token" (OuterVolumeSpecName: "collector-token") pod "6c881248-37be-4aac-8a99-5cdfef3b6d85" (UID: "6c881248-37be-4aac-8a99-5cdfef3b6d85"). InnerVolumeSpecName "collector-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.376876 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c881248-37be-4aac-8a99-5cdfef3b6d85-kube-api-access-ldx7b" (OuterVolumeSpecName: "kube-api-access-ldx7b") pod "6c881248-37be-4aac-8a99-5cdfef3b6d85" (UID: "6c881248-37be-4aac-8a99-5cdfef3b6d85"). InnerVolumeSpecName "kube-api-access-ldx7b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.376980 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c881248-37be-4aac-8a99-5cdfef3b6d85-metrics" (OuterVolumeSpecName: "metrics") pod "6c881248-37be-4aac-8a99-5cdfef3b6d85" (UID: "6c881248-37be-4aac-8a99-5cdfef3b6d85"). InnerVolumeSpecName "metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.377377 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c881248-37be-4aac-8a99-5cdfef3b6d85-collector-syslog-receiver" (OuterVolumeSpecName: "collector-syslog-receiver") pod "6c881248-37be-4aac-8a99-5cdfef3b6d85" (UID: "6c881248-37be-4aac-8a99-5cdfef3b6d85"). InnerVolumeSpecName "collector-syslog-receiver". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.475034 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ldx7b\" (UniqueName: \"kubernetes.io/projected/6c881248-37be-4aac-8a99-5cdfef3b6d85-kube-api-access-ldx7b\") on node \"crc\" DevicePath \"\"" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.475100 4791 reconciler_common.go:293] "Volume detached for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/6c881248-37be-4aac-8a99-5cdfef3b6d85-metrics\") on node \"crc\" DevicePath \"\"" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.475110 4791 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6c881248-37be-4aac-8a99-5cdfef3b6d85-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.475119 4791 reconciler_common.go:293] "Volume detached for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/6c881248-37be-4aac-8a99-5cdfef3b6d85-sa-token\") on node \"crc\" DevicePath \"\"" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.475129 4791 reconciler_common.go:293] "Volume detached for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/6c881248-37be-4aac-8a99-5cdfef3b6d85-entrypoint\") on node \"crc\" DevicePath \"\"" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.475137 4791 reconciler_common.go:293] "Volume detached for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/6c881248-37be-4aac-8a99-5cdfef3b6d85-tmp\") on node \"crc\" DevicePath \"\"" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.475144 4791 reconciler_common.go:293] "Volume detached for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/6c881248-37be-4aac-8a99-5cdfef3b6d85-collector-token\") on node \"crc\" DevicePath \"\"" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.475152 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c881248-37be-4aac-8a99-5cdfef3b6d85-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:34:42 crc kubenswrapper[4791]: I1208 21:34:42.475161 4791 reconciler_common.go:293] "Volume detached for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/6c881248-37be-4aac-8a99-5cdfef3b6d85-collector-syslog-receiver\") on node \"crc\" DevicePath \"\"" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.247657 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/collector-w964c" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.315302 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-logging/collector-w964c"] Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.322323 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-logging/collector-w964c"] Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.328677 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/collector-hvfq9"] Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.329878 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/collector-hvfq9" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.340615 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-metrics" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.341033 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-syslog-receiver" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.341257 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-token" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.341489 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-dockercfg-45xll" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.341678 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"collector-config" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.344703 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"collector-trustbundle" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.346271 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/collector-hvfq9"] Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.497222 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/336b6c6a-4e6c-4123-9c5e-676554f18718-config\") pod \"collector-hvfq9\" (UID: \"336b6c6a-4e6c-4123-9c5e-676554f18718\") " pod="openshift-logging/collector-hvfq9" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.497301 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c8wnx\" (UniqueName: \"kubernetes.io/projected/336b6c6a-4e6c-4123-9c5e-676554f18718-kube-api-access-c8wnx\") pod \"collector-hvfq9\" (UID: \"336b6c6a-4e6c-4123-9c5e-676554f18718\") " pod="openshift-logging/collector-hvfq9" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.497336 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/336b6c6a-4e6c-4123-9c5e-676554f18718-sa-token\") pod \"collector-hvfq9\" (UID: \"336b6c6a-4e6c-4123-9c5e-676554f18718\") " pod="openshift-logging/collector-hvfq9" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.497357 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/336b6c6a-4e6c-4123-9c5e-676554f18718-collector-token\") pod \"collector-hvfq9\" (UID: \"336b6c6a-4e6c-4123-9c5e-676554f18718\") " pod="openshift-logging/collector-hvfq9" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.497420 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/336b6c6a-4e6c-4123-9c5e-676554f18718-tmp\") pod \"collector-hvfq9\" (UID: \"336b6c6a-4e6c-4123-9c5e-676554f18718\") " pod="openshift-logging/collector-hvfq9" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.497456 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/336b6c6a-4e6c-4123-9c5e-676554f18718-trusted-ca\") pod \"collector-hvfq9\" (UID: \"336b6c6a-4e6c-4123-9c5e-676554f18718\") " 
pod="openshift-logging/collector-hvfq9" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.497506 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/336b6c6a-4e6c-4123-9c5e-676554f18718-config-openshift-service-cacrt\") pod \"collector-hvfq9\" (UID: \"336b6c6a-4e6c-4123-9c5e-676554f18718\") " pod="openshift-logging/collector-hvfq9" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.497532 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/336b6c6a-4e6c-4123-9c5e-676554f18718-entrypoint\") pod \"collector-hvfq9\" (UID: \"336b6c6a-4e6c-4123-9c5e-676554f18718\") " pod="openshift-logging/collector-hvfq9" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.497598 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/336b6c6a-4e6c-4123-9c5e-676554f18718-datadir\") pod \"collector-hvfq9\" (UID: \"336b6c6a-4e6c-4123-9c5e-676554f18718\") " pod="openshift-logging/collector-hvfq9" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.497628 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/336b6c6a-4e6c-4123-9c5e-676554f18718-metrics\") pod \"collector-hvfq9\" (UID: \"336b6c6a-4e6c-4123-9c5e-676554f18718\") " pod="openshift-logging/collector-hvfq9" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.497690 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/336b6c6a-4e6c-4123-9c5e-676554f18718-collector-syslog-receiver\") pod \"collector-hvfq9\" (UID: \"336b6c6a-4e6c-4123-9c5e-676554f18718\") " pod="openshift-logging/collector-hvfq9" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.598354 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/336b6c6a-4e6c-4123-9c5e-676554f18718-config-openshift-service-cacrt\") pod \"collector-hvfq9\" (UID: \"336b6c6a-4e6c-4123-9c5e-676554f18718\") " pod="openshift-logging/collector-hvfq9" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.598401 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/336b6c6a-4e6c-4123-9c5e-676554f18718-entrypoint\") pod \"collector-hvfq9\" (UID: \"336b6c6a-4e6c-4123-9c5e-676554f18718\") " pod="openshift-logging/collector-hvfq9" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.598445 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/336b6c6a-4e6c-4123-9c5e-676554f18718-datadir\") pod \"collector-hvfq9\" (UID: \"336b6c6a-4e6c-4123-9c5e-676554f18718\") " pod="openshift-logging/collector-hvfq9" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.598474 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/336b6c6a-4e6c-4123-9c5e-676554f18718-metrics\") pod \"collector-hvfq9\" (UID: \"336b6c6a-4e6c-4123-9c5e-676554f18718\") " pod="openshift-logging/collector-hvfq9" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.598514 4791 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/336b6c6a-4e6c-4123-9c5e-676554f18718-collector-syslog-receiver\") pod \"collector-hvfq9\" (UID: \"336b6c6a-4e6c-4123-9c5e-676554f18718\") " pod="openshift-logging/collector-hvfq9" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.598592 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/336b6c6a-4e6c-4123-9c5e-676554f18718-config\") pod \"collector-hvfq9\" (UID: \"336b6c6a-4e6c-4123-9c5e-676554f18718\") " pod="openshift-logging/collector-hvfq9" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.598591 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/336b6c6a-4e6c-4123-9c5e-676554f18718-datadir\") pod \"collector-hvfq9\" (UID: \"336b6c6a-4e6c-4123-9c5e-676554f18718\") " pod="openshift-logging/collector-hvfq9" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.598625 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c8wnx\" (UniqueName: \"kubernetes.io/projected/336b6c6a-4e6c-4123-9c5e-676554f18718-kube-api-access-c8wnx\") pod \"collector-hvfq9\" (UID: \"336b6c6a-4e6c-4123-9c5e-676554f18718\") " pod="openshift-logging/collector-hvfq9" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.598670 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/336b6c6a-4e6c-4123-9c5e-676554f18718-sa-token\") pod \"collector-hvfq9\" (UID: \"336b6c6a-4e6c-4123-9c5e-676554f18718\") " pod="openshift-logging/collector-hvfq9" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.598691 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/336b6c6a-4e6c-4123-9c5e-676554f18718-collector-token\") pod \"collector-hvfq9\" (UID: \"336b6c6a-4e6c-4123-9c5e-676554f18718\") " pod="openshift-logging/collector-hvfq9" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.598746 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/336b6c6a-4e6c-4123-9c5e-676554f18718-tmp\") pod \"collector-hvfq9\" (UID: \"336b6c6a-4e6c-4123-9c5e-676554f18718\") " pod="openshift-logging/collector-hvfq9" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.598776 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/336b6c6a-4e6c-4123-9c5e-676554f18718-trusted-ca\") pod \"collector-hvfq9\" (UID: \"336b6c6a-4e6c-4123-9c5e-676554f18718\") " pod="openshift-logging/collector-hvfq9" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.599281 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/336b6c6a-4e6c-4123-9c5e-676554f18718-config-openshift-service-cacrt\") pod \"collector-hvfq9\" (UID: \"336b6c6a-4e6c-4123-9c5e-676554f18718\") " pod="openshift-logging/collector-hvfq9" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.603202 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-syslog-receiver" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.605803 4791 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-logging"/"collector-config" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.606092 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-metrics" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.613775 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-token" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.619297 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/336b6c6a-4e6c-4123-9c5e-676554f18718-tmp\") pod \"collector-hvfq9\" (UID: \"336b6c6a-4e6c-4123-9c5e-676554f18718\") " pod="openshift-logging/collector-hvfq9" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.624420 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c8wnx\" (UniqueName: \"kubernetes.io/projected/336b6c6a-4e6c-4123-9c5e-676554f18718-kube-api-access-c8wnx\") pod \"collector-hvfq9\" (UID: \"336b6c6a-4e6c-4123-9c5e-676554f18718\") " pod="openshift-logging/collector-hvfq9" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.624765 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/336b6c6a-4e6c-4123-9c5e-676554f18718-config\") pod \"collector-hvfq9\" (UID: \"336b6c6a-4e6c-4123-9c5e-676554f18718\") " pod="openshift-logging/collector-hvfq9" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.625183 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/336b6c6a-4e6c-4123-9c5e-676554f18718-collector-syslog-receiver\") pod \"collector-hvfq9\" (UID: \"336b6c6a-4e6c-4123-9c5e-676554f18718\") " pod="openshift-logging/collector-hvfq9" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.627788 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/336b6c6a-4e6c-4123-9c5e-676554f18718-collector-token\") pod \"collector-hvfq9\" (UID: \"336b6c6a-4e6c-4123-9c5e-676554f18718\") " pod="openshift-logging/collector-hvfq9" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.627846 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/336b6c6a-4e6c-4123-9c5e-676554f18718-entrypoint\") pod \"collector-hvfq9\" (UID: \"336b6c6a-4e6c-4123-9c5e-676554f18718\") " pod="openshift-logging/collector-hvfq9" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.628409 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/336b6c6a-4e6c-4123-9c5e-676554f18718-sa-token\") pod \"collector-hvfq9\" (UID: \"336b6c6a-4e6c-4123-9c5e-676554f18718\") " pod="openshift-logging/collector-hvfq9" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.629170 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"collector-trustbundle" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.629862 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/336b6c6a-4e6c-4123-9c5e-676554f18718-metrics\") pod \"collector-hvfq9\" (UID: \"336b6c6a-4e6c-4123-9c5e-676554f18718\") " pod="openshift-logging/collector-hvfq9" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.631358 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="6c881248-37be-4aac-8a99-5cdfef3b6d85" path="/var/lib/kubelet/pods/6c881248-37be-4aac-8a99-5cdfef3b6d85/volumes" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.640143 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/336b6c6a-4e6c-4123-9c5e-676554f18718-trusted-ca\") pod \"collector-hvfq9\" (UID: \"336b6c6a-4e6c-4123-9c5e-676554f18718\") " pod="openshift-logging/collector-hvfq9" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.652822 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-dockercfg-45xll" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.661210 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/collector-hvfq9" Dec 08 21:34:43 crc kubenswrapper[4791]: I1208 21:34:43.886059 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/collector-hvfq9"] Dec 08 21:34:44 crc kubenswrapper[4791]: I1208 21:34:44.254178 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/collector-hvfq9" event={"ID":"336b6c6a-4e6c-4123-9c5e-676554f18718","Type":"ContainerStarted","Data":"e20481b9b6028519b84a342daac9759fd9af5c649a5217eaf2917e5f7e31c6ee"} Dec 08 21:34:51 crc kubenswrapper[4791]: I1208 21:34:51.643592 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/collector-hvfq9" event={"ID":"336b6c6a-4e6c-4123-9c5e-676554f18718","Type":"ContainerStarted","Data":"06c779d4392be1d729be2904b205583704fc55df40f4015166fccf3b4a81955b"} Dec 08 21:34:51 crc kubenswrapper[4791]: I1208 21:34:51.664677 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/collector-hvfq9" podStartSLOduration=1.147825017 podStartE2EDuration="8.664660844s" podCreationTimestamp="2025-12-08 21:34:43 +0000 UTC" firstStartedPulling="2025-12-08 21:34:43.893783481 +0000 UTC m=+960.592541826" lastFinishedPulling="2025-12-08 21:34:51.410619308 +0000 UTC m=+968.109377653" observedRunningTime="2025-12-08 21:34:51.662078201 +0000 UTC m=+968.360836556" watchObservedRunningTime="2025-12-08 21:34:51.664660844 +0000 UTC m=+968.363419189" Dec 08 21:35:05 crc kubenswrapper[4791]: I1208 21:35:05.251223 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:35:05 crc kubenswrapper[4791]: I1208 21:35:05.251676 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:35:24 crc kubenswrapper[4791]: I1208 21:35:24.716008 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7gmjf"] Dec 08 21:35:24 crc kubenswrapper[4791]: I1208 21:35:24.717938 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7gmjf" Dec 08 21:35:24 crc kubenswrapper[4791]: I1208 21:35:24.720267 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 08 21:35:24 crc kubenswrapper[4791]: I1208 21:35:24.726871 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7gmjf"] Dec 08 21:35:24 crc kubenswrapper[4791]: I1208 21:35:24.809647 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e8f61217-10a0-4bde-869e-28ad91534e07-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7gmjf\" (UID: \"e8f61217-10a0-4bde-869e-28ad91534e07\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7gmjf" Dec 08 21:35:24 crc kubenswrapper[4791]: I1208 21:35:24.809759 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e8f61217-10a0-4bde-869e-28ad91534e07-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7gmjf\" (UID: \"e8f61217-10a0-4bde-869e-28ad91534e07\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7gmjf" Dec 08 21:35:24 crc kubenswrapper[4791]: I1208 21:35:24.809848 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-82kvs\" (UniqueName: \"kubernetes.io/projected/e8f61217-10a0-4bde-869e-28ad91534e07-kube-api-access-82kvs\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7gmjf\" (UID: \"e8f61217-10a0-4bde-869e-28ad91534e07\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7gmjf" Dec 08 21:35:24 crc kubenswrapper[4791]: I1208 21:35:24.910823 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-82kvs\" (UniqueName: \"kubernetes.io/projected/e8f61217-10a0-4bde-869e-28ad91534e07-kube-api-access-82kvs\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7gmjf\" (UID: \"e8f61217-10a0-4bde-869e-28ad91534e07\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7gmjf" Dec 08 21:35:24 crc kubenswrapper[4791]: I1208 21:35:24.910927 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e8f61217-10a0-4bde-869e-28ad91534e07-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7gmjf\" (UID: \"e8f61217-10a0-4bde-869e-28ad91534e07\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7gmjf" Dec 08 21:35:24 crc kubenswrapper[4791]: I1208 21:35:24.910982 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e8f61217-10a0-4bde-869e-28ad91534e07-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7gmjf\" (UID: \"e8f61217-10a0-4bde-869e-28ad91534e07\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7gmjf" Dec 08 21:35:24 crc kubenswrapper[4791]: I1208 21:35:24.911510 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/e8f61217-10a0-4bde-869e-28ad91534e07-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7gmjf\" (UID: \"e8f61217-10a0-4bde-869e-28ad91534e07\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7gmjf" Dec 08 21:35:24 crc kubenswrapper[4791]: I1208 21:35:24.911608 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e8f61217-10a0-4bde-869e-28ad91534e07-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7gmjf\" (UID: \"e8f61217-10a0-4bde-869e-28ad91534e07\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7gmjf" Dec 08 21:35:24 crc kubenswrapper[4791]: I1208 21:35:24.930186 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-82kvs\" (UniqueName: \"kubernetes.io/projected/e8f61217-10a0-4bde-869e-28ad91534e07-kube-api-access-82kvs\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7gmjf\" (UID: \"e8f61217-10a0-4bde-869e-28ad91534e07\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7gmjf" Dec 08 21:35:25 crc kubenswrapper[4791]: I1208 21:35:25.087559 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7gmjf" Dec 08 21:35:25 crc kubenswrapper[4791]: I1208 21:35:25.639065 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7gmjf"] Dec 08 21:35:25 crc kubenswrapper[4791]: I1208 21:35:25.868095 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7gmjf" event={"ID":"e8f61217-10a0-4bde-869e-28ad91534e07","Type":"ContainerStarted","Data":"0d8619aedcca1ce20a2f824428c5cb08783abedc204073fb884258d76f986c56"} Dec 08 21:35:26 crc kubenswrapper[4791]: I1208 21:35:26.877292 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7gmjf" event={"ID":"e8f61217-10a0-4bde-869e-28ad91534e07","Type":"ContainerStarted","Data":"8056155a1fa2378796ebe7624cbfb36261098b0908dfad10d58fe70c45d26880"} Dec 08 21:35:27 crc kubenswrapper[4791]: I1208 21:35:27.885514 4791 generic.go:334] "Generic (PLEG): container finished" podID="e8f61217-10a0-4bde-869e-28ad91534e07" containerID="8056155a1fa2378796ebe7624cbfb36261098b0908dfad10d58fe70c45d26880" exitCode=0 Dec 08 21:35:27 crc kubenswrapper[4791]: I1208 21:35:27.885583 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7gmjf" event={"ID":"e8f61217-10a0-4bde-869e-28ad91534e07","Type":"ContainerDied","Data":"8056155a1fa2378796ebe7624cbfb36261098b0908dfad10d58fe70c45d26880"} Dec 08 21:35:27 crc kubenswrapper[4791]: I1208 21:35:27.887572 4791 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 08 21:35:29 crc kubenswrapper[4791]: I1208 21:35:29.920696 4791 generic.go:334] "Generic (PLEG): container finished" podID="e8f61217-10a0-4bde-869e-28ad91534e07" containerID="fa8225663b9c1baf52d370331b0849853cbcacd6ae73437a120fed7665afd09a" exitCode=0 Dec 08 21:35:29 crc kubenswrapper[4791]: I1208 21:35:29.920949 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7gmjf" event={"ID":"e8f61217-10a0-4bde-869e-28ad91534e07","Type":"ContainerDied","Data":"fa8225663b9c1baf52d370331b0849853cbcacd6ae73437a120fed7665afd09a"} Dec 08 21:35:30 crc kubenswrapper[4791]: I1208 21:35:30.929754 4791 generic.go:334] "Generic (PLEG): container finished" podID="e8f61217-10a0-4bde-869e-28ad91534e07" containerID="41136b1e2c38c8831c25ce6aba41615c281f4aa96d2d3f75836147ff4b8076d8" exitCode=0 Dec 08 21:35:30 crc kubenswrapper[4791]: I1208 21:35:30.929797 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7gmjf" event={"ID":"e8f61217-10a0-4bde-869e-28ad91534e07","Type":"ContainerDied","Data":"41136b1e2c38c8831c25ce6aba41615c281f4aa96d2d3f75836147ff4b8076d8"} Dec 08 21:35:32 crc kubenswrapper[4791]: I1208 21:35:32.190618 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7gmjf" Dec 08 21:35:32 crc kubenswrapper[4791]: I1208 21:35:32.222641 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e8f61217-10a0-4bde-869e-28ad91534e07-bundle\") pod \"e8f61217-10a0-4bde-869e-28ad91534e07\" (UID: \"e8f61217-10a0-4bde-869e-28ad91534e07\") " Dec 08 21:35:32 crc kubenswrapper[4791]: I1208 21:35:32.222700 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-82kvs\" (UniqueName: \"kubernetes.io/projected/e8f61217-10a0-4bde-869e-28ad91534e07-kube-api-access-82kvs\") pod \"e8f61217-10a0-4bde-869e-28ad91534e07\" (UID: \"e8f61217-10a0-4bde-869e-28ad91534e07\") " Dec 08 21:35:32 crc kubenswrapper[4791]: I1208 21:35:32.222756 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e8f61217-10a0-4bde-869e-28ad91534e07-util\") pod \"e8f61217-10a0-4bde-869e-28ad91534e07\" (UID: \"e8f61217-10a0-4bde-869e-28ad91534e07\") " Dec 08 21:35:32 crc kubenswrapper[4791]: I1208 21:35:32.223564 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e8f61217-10a0-4bde-869e-28ad91534e07-bundle" (OuterVolumeSpecName: "bundle") pod "e8f61217-10a0-4bde-869e-28ad91534e07" (UID: "e8f61217-10a0-4bde-869e-28ad91534e07"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:35:32 crc kubenswrapper[4791]: I1208 21:35:32.228868 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8f61217-10a0-4bde-869e-28ad91534e07-kube-api-access-82kvs" (OuterVolumeSpecName: "kube-api-access-82kvs") pod "e8f61217-10a0-4bde-869e-28ad91534e07" (UID: "e8f61217-10a0-4bde-869e-28ad91534e07"). InnerVolumeSpecName "kube-api-access-82kvs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:35:32 crc kubenswrapper[4791]: I1208 21:35:32.248488 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e8f61217-10a0-4bde-869e-28ad91534e07-util" (OuterVolumeSpecName: "util") pod "e8f61217-10a0-4bde-869e-28ad91534e07" (UID: "e8f61217-10a0-4bde-869e-28ad91534e07"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:35:32 crc kubenswrapper[4791]: I1208 21:35:32.325014 4791 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e8f61217-10a0-4bde-869e-28ad91534e07-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:35:32 crc kubenswrapper[4791]: I1208 21:35:32.325070 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-82kvs\" (UniqueName: \"kubernetes.io/projected/e8f61217-10a0-4bde-869e-28ad91534e07-kube-api-access-82kvs\") on node \"crc\" DevicePath \"\"" Dec 08 21:35:32 crc kubenswrapper[4791]: I1208 21:35:32.325090 4791 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e8f61217-10a0-4bde-869e-28ad91534e07-util\") on node \"crc\" DevicePath \"\"" Dec 08 21:35:32 crc kubenswrapper[4791]: I1208 21:35:32.944706 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7gmjf" event={"ID":"e8f61217-10a0-4bde-869e-28ad91534e07","Type":"ContainerDied","Data":"0d8619aedcca1ce20a2f824428c5cb08783abedc204073fb884258d76f986c56"} Dec 08 21:35:32 crc kubenswrapper[4791]: I1208 21:35:32.944768 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0d8619aedcca1ce20a2f824428c5cb08783abedc204073fb884258d76f986c56" Dec 08 21:35:32 crc kubenswrapper[4791]: I1208 21:35:32.944828 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7gmjf" Dec 08 21:35:35 crc kubenswrapper[4791]: I1208 21:35:35.251892 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:35:35 crc kubenswrapper[4791]: I1208 21:35:35.252211 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:35:35 crc kubenswrapper[4791]: I1208 21:35:35.252273 4791 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" Dec 08 21:35:35 crc kubenswrapper[4791]: I1208 21:35:35.253216 4791 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"11eb0a686e8878342818f508b449514d71e01b400661915caf0121820b7c92ca"} pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 08 21:35:35 crc kubenswrapper[4791]: I1208 21:35:35.253328 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" containerID="cri-o://11eb0a686e8878342818f508b449514d71e01b400661915caf0121820b7c92ca" gracePeriod=600 Dec 08 21:35:35 crc kubenswrapper[4791]: I1208 21:35:35.969533 4791 generic.go:334] "Generic (PLEG): container finished" 
podID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerID="11eb0a686e8878342818f508b449514d71e01b400661915caf0121820b7c92ca" exitCode=0 Dec 08 21:35:35 crc kubenswrapper[4791]: I1208 21:35:35.969803 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" event={"ID":"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d","Type":"ContainerDied","Data":"11eb0a686e8878342818f508b449514d71e01b400661915caf0121820b7c92ca"} Dec 08 21:35:35 crc kubenswrapper[4791]: I1208 21:35:35.970117 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" event={"ID":"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d","Type":"ContainerStarted","Data":"2c818dc85098516c004b18fbf01aa57ea0f0a817f59523978cf0e86c2b78304e"} Dec 08 21:35:35 crc kubenswrapper[4791]: I1208 21:35:35.970138 4791 scope.go:117] "RemoveContainer" containerID="905d580dece9c9ce3b0d4703b0d2f998c3f8e981a36384e1aaaf1f0e9b3109d3" Dec 08 21:35:36 crc kubenswrapper[4791]: I1208 21:35:36.448867 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-rjsz7"] Dec 08 21:35:36 crc kubenswrapper[4791]: E1208 21:35:36.449199 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8f61217-10a0-4bde-869e-28ad91534e07" containerName="pull" Dec 08 21:35:36 crc kubenswrapper[4791]: I1208 21:35:36.449215 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8f61217-10a0-4bde-869e-28ad91534e07" containerName="pull" Dec 08 21:35:36 crc kubenswrapper[4791]: E1208 21:35:36.449236 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8f61217-10a0-4bde-869e-28ad91534e07" containerName="extract" Dec 08 21:35:36 crc kubenswrapper[4791]: I1208 21:35:36.449243 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8f61217-10a0-4bde-869e-28ad91534e07" containerName="extract" Dec 08 21:35:36 crc kubenswrapper[4791]: E1208 21:35:36.449261 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8f61217-10a0-4bde-869e-28ad91534e07" containerName="util" Dec 08 21:35:36 crc kubenswrapper[4791]: I1208 21:35:36.449269 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8f61217-10a0-4bde-869e-28ad91534e07" containerName="util" Dec 08 21:35:36 crc kubenswrapper[4791]: I1208 21:35:36.449434 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="e8f61217-10a0-4bde-869e-28ad91534e07" containerName="extract" Dec 08 21:35:36 crc kubenswrapper[4791]: I1208 21:35:36.450061 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-rjsz7" Dec 08 21:35:36 crc kubenswrapper[4791]: I1208 21:35:36.452305 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Dec 08 21:35:36 crc kubenswrapper[4791]: I1208 21:35:36.452417 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-95mzv" Dec 08 21:35:36 crc kubenswrapper[4791]: I1208 21:35:36.452535 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Dec 08 21:35:36 crc kubenswrapper[4791]: I1208 21:35:36.468313 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-rjsz7"] Dec 08 21:35:36 crc kubenswrapper[4791]: I1208 21:35:36.495654 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d5s89\" (UniqueName: \"kubernetes.io/projected/caf3748e-6f48-478e-b1ee-b6e861f9e9c2-kube-api-access-d5s89\") pod \"nmstate-operator-5b5b58f5c8-rjsz7\" (UID: \"caf3748e-6f48-478e-b1ee-b6e861f9e9c2\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-rjsz7" Dec 08 21:35:36 crc kubenswrapper[4791]: I1208 21:35:36.596927 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d5s89\" (UniqueName: \"kubernetes.io/projected/caf3748e-6f48-478e-b1ee-b6e861f9e9c2-kube-api-access-d5s89\") pod \"nmstate-operator-5b5b58f5c8-rjsz7\" (UID: \"caf3748e-6f48-478e-b1ee-b6e861f9e9c2\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-rjsz7" Dec 08 21:35:36 crc kubenswrapper[4791]: I1208 21:35:36.617664 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d5s89\" (UniqueName: \"kubernetes.io/projected/caf3748e-6f48-478e-b1ee-b6e861f9e9c2-kube-api-access-d5s89\") pod \"nmstate-operator-5b5b58f5c8-rjsz7\" (UID: \"caf3748e-6f48-478e-b1ee-b6e861f9e9c2\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-rjsz7" Dec 08 21:35:36 crc kubenswrapper[4791]: I1208 21:35:36.768328 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-rjsz7" Dec 08 21:35:37 crc kubenswrapper[4791]: I1208 21:35:37.263007 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-rjsz7"] Dec 08 21:35:37 crc kubenswrapper[4791]: W1208 21:35:37.271936 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcaf3748e_6f48_478e_b1ee_b6e861f9e9c2.slice/crio-bebdefe34f870a63e493ad005d1cfe2400197c27b1bc2349805b0a82660727a9 WatchSource:0}: Error finding container bebdefe34f870a63e493ad005d1cfe2400197c27b1bc2349805b0a82660727a9: Status 404 returned error can't find the container with id bebdefe34f870a63e493ad005d1cfe2400197c27b1bc2349805b0a82660727a9 Dec 08 21:35:37 crc kubenswrapper[4791]: I1208 21:35:37.989080 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-rjsz7" event={"ID":"caf3748e-6f48-478e-b1ee-b6e861f9e9c2","Type":"ContainerStarted","Data":"bebdefe34f870a63e493ad005d1cfe2400197c27b1bc2349805b0a82660727a9"} Dec 08 21:35:40 crc kubenswrapper[4791]: I1208 21:35:40.005061 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-rjsz7" event={"ID":"caf3748e-6f48-478e-b1ee-b6e861f9e9c2","Type":"ContainerStarted","Data":"80e0b9699f80b2bda02d15c5c1fe28d5e26c5c1fec2ac7f8e81d4da8e8acf138"} Dec 08 21:35:40 crc kubenswrapper[4791]: I1208 21:35:40.027962 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-rjsz7" podStartSLOduration=1.8149705699999998 podStartE2EDuration="4.027936629s" podCreationTimestamp="2025-12-08 21:35:36 +0000 UTC" firstStartedPulling="2025-12-08 21:35:37.273726575 +0000 UTC m=+1013.972484920" lastFinishedPulling="2025-12-08 21:35:39.486692634 +0000 UTC m=+1016.185450979" observedRunningTime="2025-12-08 21:35:40.022430002 +0000 UTC m=+1016.721188367" watchObservedRunningTime="2025-12-08 21:35:40.027936629 +0000 UTC m=+1016.726694994" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.020831 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-4snw4"] Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.022904 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-4snw4" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.039696 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-l52qv"] Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.042583 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-l52qv" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.055730 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-tr8vj" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.056279 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.063630 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrvwn\" (UniqueName: \"kubernetes.io/projected/9b839d23-b374-47d4-ae08-37eef97533d6-kube-api-access-qrvwn\") pod \"nmstate-metrics-7f946cbc9-4snw4\" (UID: \"9b839d23-b374-47d4-ae08-37eef97533d6\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-4snw4" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.072733 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-l52qv"] Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.079542 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-4snw4"] Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.086048 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-w5q9k"] Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.087345 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-w5q9k" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.166625 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/da3c6939-b434-456a-9593-52b6793d4c53-nmstate-lock\") pod \"nmstate-handler-w5q9k\" (UID: \"da3c6939-b434-456a-9593-52b6793d4c53\") " pod="openshift-nmstate/nmstate-handler-w5q9k" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.166721 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/f00220d9-a8b6-4197-b2e4-acc99f79e2da-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-l52qv\" (UID: \"f00220d9-a8b6-4197-b2e4-acc99f79e2da\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-l52qv" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.166771 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/da3c6939-b434-456a-9593-52b6793d4c53-dbus-socket\") pod \"nmstate-handler-w5q9k\" (UID: \"da3c6939-b434-456a-9593-52b6793d4c53\") " pod="openshift-nmstate/nmstate-handler-w5q9k" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.166848 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jrm6l\" (UniqueName: \"kubernetes.io/projected/da3c6939-b434-456a-9593-52b6793d4c53-kube-api-access-jrm6l\") pod \"nmstate-handler-w5q9k\" (UID: \"da3c6939-b434-456a-9593-52b6793d4c53\") " pod="openshift-nmstate/nmstate-handler-w5q9k" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.166899 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrvwn\" (UniqueName: \"kubernetes.io/projected/9b839d23-b374-47d4-ae08-37eef97533d6-kube-api-access-qrvwn\") pod \"nmstate-metrics-7f946cbc9-4snw4\" (UID: 
\"9b839d23-b374-47d4-ae08-37eef97533d6\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-4snw4" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.166930 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/da3c6939-b434-456a-9593-52b6793d4c53-ovs-socket\") pod \"nmstate-handler-w5q9k\" (UID: \"da3c6939-b434-456a-9593-52b6793d4c53\") " pod="openshift-nmstate/nmstate-handler-w5q9k" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.166961 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bkv7\" (UniqueName: \"kubernetes.io/projected/f00220d9-a8b6-4197-b2e4-acc99f79e2da-kube-api-access-9bkv7\") pod \"nmstate-webhook-5f6d4c5ccb-l52qv\" (UID: \"f00220d9-a8b6-4197-b2e4-acc99f79e2da\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-l52qv" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.188672 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qrvwn\" (UniqueName: \"kubernetes.io/projected/9b839d23-b374-47d4-ae08-37eef97533d6-kube-api-access-qrvwn\") pod \"nmstate-metrics-7f946cbc9-4snw4\" (UID: \"9b839d23-b374-47d4-ae08-37eef97533d6\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-4snw4" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.215094 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-b4kn6"] Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.216236 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-b4kn6" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.224267 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-wk7hp" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.224404 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.224512 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.229986 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-b4kn6"] Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.268482 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/a9643486-e98d-4bac-8f5b-202e0e1c9551-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-b4kn6\" (UID: \"a9643486-e98d-4bac-8f5b-202e0e1c9551\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-b4kn6" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.268822 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/a9643486-e98d-4bac-8f5b-202e0e1c9551-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-b4kn6\" (UID: \"a9643486-e98d-4bac-8f5b-202e0e1c9551\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-b4kn6" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.268993 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rzwkr\" (UniqueName: 
\"kubernetes.io/projected/a9643486-e98d-4bac-8f5b-202e0e1c9551-kube-api-access-rzwkr\") pod \"nmstate-console-plugin-7fbb5f6569-b4kn6\" (UID: \"a9643486-e98d-4bac-8f5b-202e0e1c9551\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-b4kn6" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.269117 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jrm6l\" (UniqueName: \"kubernetes.io/projected/da3c6939-b434-456a-9593-52b6793d4c53-kube-api-access-jrm6l\") pod \"nmstate-handler-w5q9k\" (UID: \"da3c6939-b434-456a-9593-52b6793d4c53\") " pod="openshift-nmstate/nmstate-handler-w5q9k" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.269290 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/da3c6939-b434-456a-9593-52b6793d4c53-ovs-socket\") pod \"nmstate-handler-w5q9k\" (UID: \"da3c6939-b434-456a-9593-52b6793d4c53\") " pod="openshift-nmstate/nmstate-handler-w5q9k" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.269410 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9bkv7\" (UniqueName: \"kubernetes.io/projected/f00220d9-a8b6-4197-b2e4-acc99f79e2da-kube-api-access-9bkv7\") pod \"nmstate-webhook-5f6d4c5ccb-l52qv\" (UID: \"f00220d9-a8b6-4197-b2e4-acc99f79e2da\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-l52qv" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.269541 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/da3c6939-b434-456a-9593-52b6793d4c53-nmstate-lock\") pod \"nmstate-handler-w5q9k\" (UID: \"da3c6939-b434-456a-9593-52b6793d4c53\") " pod="openshift-nmstate/nmstate-handler-w5q9k" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.269717 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/f00220d9-a8b6-4197-b2e4-acc99f79e2da-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-l52qv\" (UID: \"f00220d9-a8b6-4197-b2e4-acc99f79e2da\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-l52qv" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.269852 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/da3c6939-b434-456a-9593-52b6793d4c53-dbus-socket\") pod \"nmstate-handler-w5q9k\" (UID: \"da3c6939-b434-456a-9593-52b6793d4c53\") " pod="openshift-nmstate/nmstate-handler-w5q9k" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.269584 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/da3c6939-b434-456a-9593-52b6793d4c53-ovs-socket\") pod \"nmstate-handler-w5q9k\" (UID: \"da3c6939-b434-456a-9593-52b6793d4c53\") " pod="openshift-nmstate/nmstate-handler-w5q9k" Dec 08 21:35:46 crc kubenswrapper[4791]: E1208 21:35:46.269776 4791 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Dec 08 21:35:46 crc kubenswrapper[4791]: E1208 21:35:46.270179 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f00220d9-a8b6-4197-b2e4-acc99f79e2da-tls-key-pair podName:f00220d9-a8b6-4197-b2e4-acc99f79e2da nodeName:}" failed. No retries permitted until 2025-12-08 21:35:46.770158382 +0000 UTC m=+1023.468916727 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/f00220d9-a8b6-4197-b2e4-acc99f79e2da-tls-key-pair") pod "nmstate-webhook-5f6d4c5ccb-l52qv" (UID: "f00220d9-a8b6-4197-b2e4-acc99f79e2da") : secret "openshift-nmstate-webhook" not found Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.269721 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/da3c6939-b434-456a-9593-52b6793d4c53-nmstate-lock\") pod \"nmstate-handler-w5q9k\" (UID: \"da3c6939-b434-456a-9593-52b6793d4c53\") " pod="openshift-nmstate/nmstate-handler-w5q9k" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.271039 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/da3c6939-b434-456a-9593-52b6793d4c53-dbus-socket\") pod \"nmstate-handler-w5q9k\" (UID: \"da3c6939-b434-456a-9593-52b6793d4c53\") " pod="openshift-nmstate/nmstate-handler-w5q9k" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.298351 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jrm6l\" (UniqueName: \"kubernetes.io/projected/da3c6939-b434-456a-9593-52b6793d4c53-kube-api-access-jrm6l\") pod \"nmstate-handler-w5q9k\" (UID: \"da3c6939-b434-456a-9593-52b6793d4c53\") " pod="openshift-nmstate/nmstate-handler-w5q9k" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.299232 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9bkv7\" (UniqueName: \"kubernetes.io/projected/f00220d9-a8b6-4197-b2e4-acc99f79e2da-kube-api-access-9bkv7\") pod \"nmstate-webhook-5f6d4c5ccb-l52qv\" (UID: \"f00220d9-a8b6-4197-b2e4-acc99f79e2da\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-l52qv" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.371722 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/a9643486-e98d-4bac-8f5b-202e0e1c9551-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-b4kn6\" (UID: \"a9643486-e98d-4bac-8f5b-202e0e1c9551\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-b4kn6" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.371992 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/a9643486-e98d-4bac-8f5b-202e0e1c9551-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-b4kn6\" (UID: \"a9643486-e98d-4bac-8f5b-202e0e1c9551\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-b4kn6" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.372109 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rzwkr\" (UniqueName: \"kubernetes.io/projected/a9643486-e98d-4bac-8f5b-202e0e1c9551-kube-api-access-rzwkr\") pod \"nmstate-console-plugin-7fbb5f6569-b4kn6\" (UID: \"a9643486-e98d-4bac-8f5b-202e0e1c9551\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-b4kn6" Dec 08 21:35:46 crc kubenswrapper[4791]: E1208 21:35:46.372201 4791 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found Dec 08 21:35:46 crc kubenswrapper[4791]: E1208 21:35:46.372286 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a9643486-e98d-4bac-8f5b-202e0e1c9551-plugin-serving-cert podName:a9643486-e98d-4bac-8f5b-202e0e1c9551 nodeName:}" failed. 
No retries permitted until 2025-12-08 21:35:46.872265832 +0000 UTC m=+1023.571024177 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/a9643486-e98d-4bac-8f5b-202e0e1c9551-plugin-serving-cert") pod "nmstate-console-plugin-7fbb5f6569-b4kn6" (UID: "a9643486-e98d-4bac-8f5b-202e0e1c9551") : secret "plugin-serving-cert" not found Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.373203 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/a9643486-e98d-4bac-8f5b-202e0e1c9551-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-b4kn6\" (UID: \"a9643486-e98d-4bac-8f5b-202e0e1c9551\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-b4kn6" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.380412 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-4snw4" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.392857 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rzwkr\" (UniqueName: \"kubernetes.io/projected/a9643486-e98d-4bac-8f5b-202e0e1c9551-kube-api-access-rzwkr\") pod \"nmstate-console-plugin-7fbb5f6569-b4kn6\" (UID: \"a9643486-e98d-4bac-8f5b-202e0e1c9551\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-b4kn6" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.412327 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-w5q9k" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.461574 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-5f6b5c5f6d-96c6d"] Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.462777 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-5f6b5c5f6d-96c6d" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.481647 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-5f6b5c5f6d-96c6d"] Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.576166 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/f9971ba5-c02c-4190-8e5e-d2fc36342deb-console-oauth-config\") pod \"console-5f6b5c5f6d-96c6d\" (UID: \"f9971ba5-c02c-4190-8e5e-d2fc36342deb\") " pod="openshift-console/console-5f6b5c5f6d-96c6d" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.576228 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f9971ba5-c02c-4190-8e5e-d2fc36342deb-service-ca\") pod \"console-5f6b5c5f6d-96c6d\" (UID: \"f9971ba5-c02c-4190-8e5e-d2fc36342deb\") " pod="openshift-console/console-5f6b5c5f6d-96c6d" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.576279 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/f9971ba5-c02c-4190-8e5e-d2fc36342deb-oauth-serving-cert\") pod \"console-5f6b5c5f6d-96c6d\" (UID: \"f9971ba5-c02c-4190-8e5e-d2fc36342deb\") " pod="openshift-console/console-5f6b5c5f6d-96c6d" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.576429 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/f9971ba5-c02c-4190-8e5e-d2fc36342deb-console-serving-cert\") pod \"console-5f6b5c5f6d-96c6d\" (UID: \"f9971ba5-c02c-4190-8e5e-d2fc36342deb\") " pod="openshift-console/console-5f6b5c5f6d-96c6d" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.576631 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qm5xg\" (UniqueName: \"kubernetes.io/projected/f9971ba5-c02c-4190-8e5e-d2fc36342deb-kube-api-access-qm5xg\") pod \"console-5f6b5c5f6d-96c6d\" (UID: \"f9971ba5-c02c-4190-8e5e-d2fc36342deb\") " pod="openshift-console/console-5f6b5c5f6d-96c6d" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.576782 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f9971ba5-c02c-4190-8e5e-d2fc36342deb-trusted-ca-bundle\") pod \"console-5f6b5c5f6d-96c6d\" (UID: \"f9971ba5-c02c-4190-8e5e-d2fc36342deb\") " pod="openshift-console/console-5f6b5c5f6d-96c6d" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.576806 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/f9971ba5-c02c-4190-8e5e-d2fc36342deb-console-config\") pod \"console-5f6b5c5f6d-96c6d\" (UID: \"f9971ba5-c02c-4190-8e5e-d2fc36342deb\") " pod="openshift-console/console-5f6b5c5f6d-96c6d" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.684644 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f9971ba5-c02c-4190-8e5e-d2fc36342deb-trusted-ca-bundle\") pod \"console-5f6b5c5f6d-96c6d\" (UID: \"f9971ba5-c02c-4190-8e5e-d2fc36342deb\") " pod="openshift-console/console-5f6b5c5f6d-96c6d" Dec 08 21:35:46 crc 
kubenswrapper[4791]: I1208 21:35:46.684694 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/f9971ba5-c02c-4190-8e5e-d2fc36342deb-console-config\") pod \"console-5f6b5c5f6d-96c6d\" (UID: \"f9971ba5-c02c-4190-8e5e-d2fc36342deb\") " pod="openshift-console/console-5f6b5c5f6d-96c6d" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.684809 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/f9971ba5-c02c-4190-8e5e-d2fc36342deb-console-oauth-config\") pod \"console-5f6b5c5f6d-96c6d\" (UID: \"f9971ba5-c02c-4190-8e5e-d2fc36342deb\") " pod="openshift-console/console-5f6b5c5f6d-96c6d" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.684840 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f9971ba5-c02c-4190-8e5e-d2fc36342deb-service-ca\") pod \"console-5f6b5c5f6d-96c6d\" (UID: \"f9971ba5-c02c-4190-8e5e-d2fc36342deb\") " pod="openshift-console/console-5f6b5c5f6d-96c6d" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.684871 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/f9971ba5-c02c-4190-8e5e-d2fc36342deb-oauth-serving-cert\") pod \"console-5f6b5c5f6d-96c6d\" (UID: \"f9971ba5-c02c-4190-8e5e-d2fc36342deb\") " pod="openshift-console/console-5f6b5c5f6d-96c6d" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.684920 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/f9971ba5-c02c-4190-8e5e-d2fc36342deb-console-serving-cert\") pod \"console-5f6b5c5f6d-96c6d\" (UID: \"f9971ba5-c02c-4190-8e5e-d2fc36342deb\") " pod="openshift-console/console-5f6b5c5f6d-96c6d" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.685009 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qm5xg\" (UniqueName: \"kubernetes.io/projected/f9971ba5-c02c-4190-8e5e-d2fc36342deb-kube-api-access-qm5xg\") pod \"console-5f6b5c5f6d-96c6d\" (UID: \"f9971ba5-c02c-4190-8e5e-d2fc36342deb\") " pod="openshift-console/console-5f6b5c5f6d-96c6d" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.686006 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f9971ba5-c02c-4190-8e5e-d2fc36342deb-trusted-ca-bundle\") pod \"console-5f6b5c5f6d-96c6d\" (UID: \"f9971ba5-c02c-4190-8e5e-d2fc36342deb\") " pod="openshift-console/console-5f6b5c5f6d-96c6d" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.686665 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/f9971ba5-c02c-4190-8e5e-d2fc36342deb-console-config\") pod \"console-5f6b5c5f6d-96c6d\" (UID: \"f9971ba5-c02c-4190-8e5e-d2fc36342deb\") " pod="openshift-console/console-5f6b5c5f6d-96c6d" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.687115 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/f9971ba5-c02c-4190-8e5e-d2fc36342deb-oauth-serving-cert\") pod \"console-5f6b5c5f6d-96c6d\" (UID: \"f9971ba5-c02c-4190-8e5e-d2fc36342deb\") " pod="openshift-console/console-5f6b5c5f6d-96c6d" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 
21:35:46.687419 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f9971ba5-c02c-4190-8e5e-d2fc36342deb-service-ca\") pod \"console-5f6b5c5f6d-96c6d\" (UID: \"f9971ba5-c02c-4190-8e5e-d2fc36342deb\") " pod="openshift-console/console-5f6b5c5f6d-96c6d" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.690350 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/f9971ba5-c02c-4190-8e5e-d2fc36342deb-console-serving-cert\") pod \"console-5f6b5c5f6d-96c6d\" (UID: \"f9971ba5-c02c-4190-8e5e-d2fc36342deb\") " pod="openshift-console/console-5f6b5c5f6d-96c6d" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.690445 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/f9971ba5-c02c-4190-8e5e-d2fc36342deb-console-oauth-config\") pod \"console-5f6b5c5f6d-96c6d\" (UID: \"f9971ba5-c02c-4190-8e5e-d2fc36342deb\") " pod="openshift-console/console-5f6b5c5f6d-96c6d" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.712830 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qm5xg\" (UniqueName: \"kubernetes.io/projected/f9971ba5-c02c-4190-8e5e-d2fc36342deb-kube-api-access-qm5xg\") pod \"console-5f6b5c5f6d-96c6d\" (UID: \"f9971ba5-c02c-4190-8e5e-d2fc36342deb\") " pod="openshift-console/console-5f6b5c5f6d-96c6d" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.786508 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/f00220d9-a8b6-4197-b2e4-acc99f79e2da-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-l52qv\" (UID: \"f00220d9-a8b6-4197-b2e4-acc99f79e2da\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-l52qv" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.790240 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/f00220d9-a8b6-4197-b2e4-acc99f79e2da-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-l52qv\" (UID: \"f00220d9-a8b6-4197-b2e4-acc99f79e2da\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-l52qv" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.800056 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-5f6b5c5f6d-96c6d" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.887990 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/a9643486-e98d-4bac-8f5b-202e0e1c9551-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-b4kn6\" (UID: \"a9643486-e98d-4bac-8f5b-202e0e1c9551\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-b4kn6" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.894327 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/a9643486-e98d-4bac-8f5b-202e0e1c9551-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-b4kn6\" (UID: \"a9643486-e98d-4bac-8f5b-202e0e1c9551\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-b4kn6" Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.937073 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-4snw4"] Dec 08 21:35:46 crc kubenswrapper[4791]: I1208 21:35:46.991169 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-l52qv" Dec 08 21:35:47 crc kubenswrapper[4791]: I1208 21:35:47.080527 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-w5q9k" event={"ID":"da3c6939-b434-456a-9593-52b6793d4c53","Type":"ContainerStarted","Data":"c137b33708d2ad7a934103e17377d0436b47294560635ab30b6e798c0f4e37a2"} Dec 08 21:35:47 crc kubenswrapper[4791]: I1208 21:35:47.081931 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-4snw4" event={"ID":"9b839d23-b374-47d4-ae08-37eef97533d6","Type":"ContainerStarted","Data":"45f54c5f604a10764e5961130f674e2b245279b1b71e0f8bddff3d382d111ba1"} Dec 08 21:35:47 crc kubenswrapper[4791]: I1208 21:35:47.169064 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-b4kn6" Dec 08 21:35:47 crc kubenswrapper[4791]: I1208 21:35:47.281819 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-5f6b5c5f6d-96c6d"] Dec 08 21:35:47 crc kubenswrapper[4791]: I1208 21:35:47.403102 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-l52qv"] Dec 08 21:35:47 crc kubenswrapper[4791]: I1208 21:35:47.610831 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-b4kn6"] Dec 08 21:35:47 crc kubenswrapper[4791]: W1208 21:35:47.613501 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda9643486_e98d_4bac_8f5b_202e0e1c9551.slice/crio-9d7280c55029135d4b4384b626fecab65bf1fb3f837be194e6524e1ce74ee6c1 WatchSource:0}: Error finding container 9d7280c55029135d4b4384b626fecab65bf1fb3f837be194e6524e1ce74ee6c1: Status 404 returned error can't find the container with id 9d7280c55029135d4b4384b626fecab65bf1fb3f837be194e6524e1ce74ee6c1 Dec 08 21:35:48 crc kubenswrapper[4791]: I1208 21:35:48.097765 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-b4kn6" event={"ID":"a9643486-e98d-4bac-8f5b-202e0e1c9551","Type":"ContainerStarted","Data":"9d7280c55029135d4b4384b626fecab65bf1fb3f837be194e6524e1ce74ee6c1"} Dec 08 21:35:48 crc kubenswrapper[4791]: I1208 21:35:48.099451 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-5f6b5c5f6d-96c6d" event={"ID":"f9971ba5-c02c-4190-8e5e-d2fc36342deb","Type":"ContainerStarted","Data":"63e68419f3d31d19096ae78858824975f1150866354514ed89ab4ed9a59a50c9"} Dec 08 21:35:48 crc kubenswrapper[4791]: I1208 21:35:48.099476 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-5f6b5c5f6d-96c6d" event={"ID":"f9971ba5-c02c-4190-8e5e-d2fc36342deb","Type":"ContainerStarted","Data":"8cab8b6538776196ca7947c4167e32de5db5b951b1a59e70847bf869aa2ed660"} Dec 08 21:35:48 crc kubenswrapper[4791]: I1208 21:35:48.105775 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-l52qv" event={"ID":"f00220d9-a8b6-4197-b2e4-acc99f79e2da","Type":"ContainerStarted","Data":"6591e48ddec978eed3f374478030cc35723ad3e1cba68083df13bfae9f6bd08f"} Dec 08 21:35:48 crc kubenswrapper[4791]: I1208 21:35:48.149951 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-5f6b5c5f6d-96c6d" podStartSLOduration=2.1499318609999998 podStartE2EDuration="2.149931861s" podCreationTimestamp="2025-12-08 21:35:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:35:48.128828938 +0000 UTC m=+1024.827587313" watchObservedRunningTime="2025-12-08 21:35:48.149931861 +0000 UTC m=+1024.848690206" Dec 08 21:35:50 crc kubenswrapper[4791]: I1208 21:35:50.121346 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-l52qv" event={"ID":"f00220d9-a8b6-4197-b2e4-acc99f79e2da","Type":"ContainerStarted","Data":"26d95def79a7cdd9d67e62ea83b62d27d932745a8fb3271e1d5619ffb6048244"} Dec 08 21:35:50 crc kubenswrapper[4791]: I1208 21:35:50.123231 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-l52qv" Dec 
08 21:35:50 crc kubenswrapper[4791]: I1208 21:35:50.129120 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-4snw4" event={"ID":"9b839d23-b374-47d4-ae08-37eef97533d6","Type":"ContainerStarted","Data":"29b0cf7dfef15d2fb5136e38ea1036e0521970bf3c7a71d569daee1a4d3f022e"} Dec 08 21:35:50 crc kubenswrapper[4791]: I1208 21:35:50.130889 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-w5q9k" event={"ID":"da3c6939-b434-456a-9593-52b6793d4c53","Type":"ContainerStarted","Data":"33438aa37bdae892b3c3827d0963e09d6e170cc6cf73bb66774a3b0cc2ce89b5"} Dec 08 21:35:50 crc kubenswrapper[4791]: I1208 21:35:50.131758 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-w5q9k" Dec 08 21:35:50 crc kubenswrapper[4791]: I1208 21:35:50.149922 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-l52qv" podStartSLOduration=2.355162799 podStartE2EDuration="4.149898111s" podCreationTimestamp="2025-12-08 21:35:46 +0000 UTC" firstStartedPulling="2025-12-08 21:35:47.432155912 +0000 UTC m=+1024.130914257" lastFinishedPulling="2025-12-08 21:35:49.226891224 +0000 UTC m=+1025.925649569" observedRunningTime="2025-12-08 21:35:50.144926498 +0000 UTC m=+1026.843684843" watchObservedRunningTime="2025-12-08 21:35:50.149898111 +0000 UTC m=+1026.848656486" Dec 08 21:35:50 crc kubenswrapper[4791]: I1208 21:35:50.180251 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-w5q9k" podStartSLOduration=1.3976590469999999 podStartE2EDuration="4.180227153s" podCreationTimestamp="2025-12-08 21:35:46 +0000 UTC" firstStartedPulling="2025-12-08 21:35:46.449352133 +0000 UTC m=+1023.148110478" lastFinishedPulling="2025-12-08 21:35:49.231920229 +0000 UTC m=+1025.930678584" observedRunningTime="2025-12-08 21:35:50.164514943 +0000 UTC m=+1026.863273288" watchObservedRunningTime="2025-12-08 21:35:50.180227153 +0000 UTC m=+1026.878985498" Dec 08 21:35:51 crc kubenswrapper[4791]: I1208 21:35:51.144153 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-b4kn6" event={"ID":"a9643486-e98d-4bac-8f5b-202e0e1c9551","Type":"ContainerStarted","Data":"e3bc9a8a8e60716c654f3363f56f16450ca2a5b2c8bdca482a588675caac71ef"} Dec 08 21:35:51 crc kubenswrapper[4791]: I1208 21:35:51.173701 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-b4kn6" podStartSLOduration=2.471252025 podStartE2EDuration="5.173632574s" podCreationTimestamp="2025-12-08 21:35:46 +0000 UTC" firstStartedPulling="2025-12-08 21:35:47.61529029 +0000 UTC m=+1024.314048635" lastFinishedPulling="2025-12-08 21:35:50.317670839 +0000 UTC m=+1027.016429184" observedRunningTime="2025-12-08 21:35:51.166766683 +0000 UTC m=+1027.865525058" watchObservedRunningTime="2025-12-08 21:35:51.173632574 +0000 UTC m=+1027.872390919" Dec 08 21:35:52 crc kubenswrapper[4791]: I1208 21:35:52.154078 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-4snw4" event={"ID":"9b839d23-b374-47d4-ae08-37eef97533d6","Type":"ContainerStarted","Data":"ad0b906ea3d1cc8ead2e6e81b932649de67778bbc1ee6dbff9189aa0bf364c43"} Dec 08 21:35:52 crc kubenswrapper[4791]: I1208 21:35:52.170678 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-nmstate/nmstate-metrics-7f946cbc9-4snw4" podStartSLOduration=1.327266922 podStartE2EDuration="6.170659365s" podCreationTimestamp="2025-12-08 21:35:46 +0000 UTC" firstStartedPulling="2025-12-08 21:35:46.949380876 +0000 UTC m=+1023.648139221" lastFinishedPulling="2025-12-08 21:35:51.792773319 +0000 UTC m=+1028.491531664" observedRunningTime="2025-12-08 21:35:52.169283991 +0000 UTC m=+1028.868042356" watchObservedRunningTime="2025-12-08 21:35:52.170659365 +0000 UTC m=+1028.869417720" Dec 08 21:35:56 crc kubenswrapper[4791]: I1208 21:35:56.439961 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-w5q9k" Dec 08 21:35:56 crc kubenswrapper[4791]: I1208 21:35:56.800830 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-5f6b5c5f6d-96c6d" Dec 08 21:35:56 crc kubenswrapper[4791]: I1208 21:35:56.800895 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-5f6b5c5f6d-96c6d" Dec 08 21:35:56 crc kubenswrapper[4791]: I1208 21:35:56.806852 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-5f6b5c5f6d-96c6d" Dec 08 21:35:57 crc kubenswrapper[4791]: I1208 21:35:57.194745 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-5f6b5c5f6d-96c6d" Dec 08 21:35:57 crc kubenswrapper[4791]: I1208 21:35:57.262174 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-86c75cd9c-lmfh7"] Dec 08 21:36:06 crc kubenswrapper[4791]: I1208 21:36:06.998145 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-l52qv" Dec 08 21:36:22 crc kubenswrapper[4791]: I1208 21:36:22.321120 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-86c75cd9c-lmfh7" podUID="23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa" containerName="console" containerID="cri-o://4d49db4497b83f933e9732726dfd3d07802a73f63701c87b950a6184cfc56fec" gracePeriod=15 Dec 08 21:36:22 crc kubenswrapper[4791]: I1208 21:36:22.637560 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8372w4n"] Dec 08 21:36:22 crc kubenswrapper[4791]: I1208 21:36:22.640989 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8372w4n" Dec 08 21:36:22 crc kubenswrapper[4791]: I1208 21:36:22.646827 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 08 21:36:22 crc kubenswrapper[4791]: I1208 21:36:22.653614 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8372w4n"] Dec 08 21:36:22 crc kubenswrapper[4791]: I1208 21:36:22.717499 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bf8hc\" (UniqueName: \"kubernetes.io/projected/ccbca2a1-3575-4209-bf34-d3812ad3bc1a-kube-api-access-bf8hc\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8372w4n\" (UID: \"ccbca2a1-3575-4209-bf34-d3812ad3bc1a\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8372w4n" Dec 08 21:36:22 crc kubenswrapper[4791]: I1208 21:36:22.717617 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ccbca2a1-3575-4209-bf34-d3812ad3bc1a-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8372w4n\" (UID: \"ccbca2a1-3575-4209-bf34-d3812ad3bc1a\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8372w4n" Dec 08 21:36:22 crc kubenswrapper[4791]: I1208 21:36:22.718317 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ccbca2a1-3575-4209-bf34-d3812ad3bc1a-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8372w4n\" (UID: \"ccbca2a1-3575-4209-bf34-d3812ad3bc1a\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8372w4n" Dec 08 21:36:22 crc kubenswrapper[4791]: I1208 21:36:22.743774 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-86c75cd9c-lmfh7_23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa/console/0.log" Dec 08 21:36:22 crc kubenswrapper[4791]: I1208 21:36:22.744047 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-86c75cd9c-lmfh7" Dec 08 21:36:22 crc kubenswrapper[4791]: I1208 21:36:22.820260 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa-trusted-ca-bundle\") pod \"23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa\" (UID: \"23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa\") " Dec 08 21:36:22 crc kubenswrapper[4791]: I1208 21:36:22.820399 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lm45b\" (UniqueName: \"kubernetes.io/projected/23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa-kube-api-access-lm45b\") pod \"23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa\" (UID: \"23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa\") " Dec 08 21:36:22 crc kubenswrapper[4791]: I1208 21:36:22.820432 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa-service-ca\") pod \"23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa\" (UID: \"23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa\") " Dec 08 21:36:22 crc kubenswrapper[4791]: I1208 21:36:22.820481 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa-console-oauth-config\") pod \"23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa\" (UID: \"23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa\") " Dec 08 21:36:22 crc kubenswrapper[4791]: I1208 21:36:22.820545 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa-console-serving-cert\") pod \"23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa\" (UID: \"23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa\") " Dec 08 21:36:22 crc kubenswrapper[4791]: I1208 21:36:22.820561 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa-console-config\") pod \"23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa\" (UID: \"23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa\") " Dec 08 21:36:22 crc kubenswrapper[4791]: I1208 21:36:22.820592 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa-oauth-serving-cert\") pod \"23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa\" (UID: \"23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa\") " Dec 08 21:36:22 crc kubenswrapper[4791]: I1208 21:36:22.820847 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ccbca2a1-3575-4209-bf34-d3812ad3bc1a-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8372w4n\" (UID: \"ccbca2a1-3575-4209-bf34-d3812ad3bc1a\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8372w4n" Dec 08 21:36:22 crc kubenswrapper[4791]: I1208 21:36:22.820911 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bf8hc\" (UniqueName: \"kubernetes.io/projected/ccbca2a1-3575-4209-bf34-d3812ad3bc1a-kube-api-access-bf8hc\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8372w4n\" (UID: \"ccbca2a1-3575-4209-bf34-d3812ad3bc1a\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8372w4n" Dec 08 21:36:22 crc 
kubenswrapper[4791]: I1208 21:36:22.820955 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ccbca2a1-3575-4209-bf34-d3812ad3bc1a-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8372w4n\" (UID: \"ccbca2a1-3575-4209-bf34-d3812ad3bc1a\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8372w4n" Dec 08 21:36:22 crc kubenswrapper[4791]: I1208 21:36:22.821164 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa" (UID: "23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:36:22 crc kubenswrapper[4791]: I1208 21:36:22.821449 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa-console-config" (OuterVolumeSpecName: "console-config") pod "23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa" (UID: "23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:36:22 crc kubenswrapper[4791]: I1208 21:36:22.821491 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa-service-ca" (OuterVolumeSpecName: "service-ca") pod "23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa" (UID: "23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:36:22 crc kubenswrapper[4791]: I1208 21:36:22.821533 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ccbca2a1-3575-4209-bf34-d3812ad3bc1a-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8372w4n\" (UID: \"ccbca2a1-3575-4209-bf34-d3812ad3bc1a\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8372w4n" Dec 08 21:36:22 crc kubenswrapper[4791]: I1208 21:36:22.821570 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ccbca2a1-3575-4209-bf34-d3812ad3bc1a-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8372w4n\" (UID: \"ccbca2a1-3575-4209-bf34-d3812ad3bc1a\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8372w4n" Dec 08 21:36:22 crc kubenswrapper[4791]: I1208 21:36:22.821969 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa" (UID: "23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:36:22 crc kubenswrapper[4791]: I1208 21:36:22.827857 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa" (UID: "23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa"). InnerVolumeSpecName "console-oauth-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:36:22 crc kubenswrapper[4791]: I1208 21:36:22.829142 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa-kube-api-access-lm45b" (OuterVolumeSpecName: "kube-api-access-lm45b") pod "23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa" (UID: "23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa"). InnerVolumeSpecName "kube-api-access-lm45b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:36:22 crc kubenswrapper[4791]: I1208 21:36:22.839626 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bf8hc\" (UniqueName: \"kubernetes.io/projected/ccbca2a1-3575-4209-bf34-d3812ad3bc1a-kube-api-access-bf8hc\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8372w4n\" (UID: \"ccbca2a1-3575-4209-bf34-d3812ad3bc1a\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8372w4n" Dec 08 21:36:22 crc kubenswrapper[4791]: I1208 21:36:22.851995 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa" (UID: "23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:36:22 crc kubenswrapper[4791]: I1208 21:36:22.922938 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lm45b\" (UniqueName: \"kubernetes.io/projected/23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa-kube-api-access-lm45b\") on node \"crc\" DevicePath \"\"" Dec 08 21:36:22 crc kubenswrapper[4791]: I1208 21:36:22.922977 4791 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa-service-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:36:22 crc kubenswrapper[4791]: I1208 21:36:22.922988 4791 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:36:22 crc kubenswrapper[4791]: I1208 21:36:22.923000 4791 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:36:22 crc kubenswrapper[4791]: I1208 21:36:22.923012 4791 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa-console-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:36:22 crc kubenswrapper[4791]: I1208 21:36:22.923021 4791 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:36:22 crc kubenswrapper[4791]: I1208 21:36:22.923029 4791 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:36:23 crc kubenswrapper[4791]: I1208 21:36:23.041108 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8372w4n" Dec 08 21:36:23 crc kubenswrapper[4791]: I1208 21:36:23.401074 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-86c75cd9c-lmfh7_23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa/console/0.log" Dec 08 21:36:23 crc kubenswrapper[4791]: I1208 21:36:23.401125 4791 generic.go:334] "Generic (PLEG): container finished" podID="23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa" containerID="4d49db4497b83f933e9732726dfd3d07802a73f63701c87b950a6184cfc56fec" exitCode=2 Dec 08 21:36:23 crc kubenswrapper[4791]: I1208 21:36:23.401161 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-86c75cd9c-lmfh7" event={"ID":"23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa","Type":"ContainerDied","Data":"4d49db4497b83f933e9732726dfd3d07802a73f63701c87b950a6184cfc56fec"} Dec 08 21:36:23 crc kubenswrapper[4791]: I1208 21:36:23.401186 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-86c75cd9c-lmfh7" event={"ID":"23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa","Type":"ContainerDied","Data":"71991288d6da38dc9fdcce84a4b146a3683a49436c31ababde6b949cd0c69215"} Dec 08 21:36:23 crc kubenswrapper[4791]: I1208 21:36:23.401206 4791 scope.go:117] "RemoveContainer" containerID="4d49db4497b83f933e9732726dfd3d07802a73f63701c87b950a6184cfc56fec" Dec 08 21:36:23 crc kubenswrapper[4791]: I1208 21:36:23.401383 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-86c75cd9c-lmfh7" Dec 08 21:36:23 crc kubenswrapper[4791]: I1208 21:36:23.425915 4791 scope.go:117] "RemoveContainer" containerID="4d49db4497b83f933e9732726dfd3d07802a73f63701c87b950a6184cfc56fec" Dec 08 21:36:23 crc kubenswrapper[4791]: E1208 21:36:23.427349 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d49db4497b83f933e9732726dfd3d07802a73f63701c87b950a6184cfc56fec\": container with ID starting with 4d49db4497b83f933e9732726dfd3d07802a73f63701c87b950a6184cfc56fec not found: ID does not exist" containerID="4d49db4497b83f933e9732726dfd3d07802a73f63701c87b950a6184cfc56fec" Dec 08 21:36:23 crc kubenswrapper[4791]: I1208 21:36:23.427396 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d49db4497b83f933e9732726dfd3d07802a73f63701c87b950a6184cfc56fec"} err="failed to get container status \"4d49db4497b83f933e9732726dfd3d07802a73f63701c87b950a6184cfc56fec\": rpc error: code = NotFound desc = could not find container \"4d49db4497b83f933e9732726dfd3d07802a73f63701c87b950a6184cfc56fec\": container with ID starting with 4d49db4497b83f933e9732726dfd3d07802a73f63701c87b950a6184cfc56fec not found: ID does not exist" Dec 08 21:36:23 crc kubenswrapper[4791]: I1208 21:36:23.436930 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-86c75cd9c-lmfh7"] Dec 08 21:36:23 crc kubenswrapper[4791]: I1208 21:36:23.440589 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-86c75cd9c-lmfh7"] Dec 08 21:36:23 crc kubenswrapper[4791]: I1208 21:36:23.610799 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa" path="/var/lib/kubelet/pods/23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa/volumes" Dec 08 21:36:23 crc kubenswrapper[4791]: I1208 21:36:23.626432 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8372w4n"] Dec 08 21:36:23 crc kubenswrapper[4791]: W1208 21:36:23.633053 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podccbca2a1_3575_4209_bf34_d3812ad3bc1a.slice/crio-24d310827f9119ef0069d2425dc5469b66c86f64fdcedadcc9098def7166c939 WatchSource:0}: Error finding container 24d310827f9119ef0069d2425dc5469b66c86f64fdcedadcc9098def7166c939: Status 404 returned error can't find the container with id 24d310827f9119ef0069d2425dc5469b66c86f64fdcedadcc9098def7166c939 Dec 08 21:36:24 crc kubenswrapper[4791]: I1208 21:36:24.409757 4791 generic.go:334] "Generic (PLEG): container finished" podID="ccbca2a1-3575-4209-bf34-d3812ad3bc1a" containerID="6167d87568a8548531da406b3fefb94d1e9370b6aaf274d25d23da4dc5d2ae13" exitCode=0 Dec 08 21:36:24 crc kubenswrapper[4791]: I1208 21:36:24.409849 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8372w4n" event={"ID":"ccbca2a1-3575-4209-bf34-d3812ad3bc1a","Type":"ContainerDied","Data":"6167d87568a8548531da406b3fefb94d1e9370b6aaf274d25d23da4dc5d2ae13"} Dec 08 21:36:24 crc kubenswrapper[4791]: I1208 21:36:24.411175 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8372w4n" event={"ID":"ccbca2a1-3575-4209-bf34-d3812ad3bc1a","Type":"ContainerStarted","Data":"24d310827f9119ef0069d2425dc5469b66c86f64fdcedadcc9098def7166c939"} Dec 08 21:36:30 crc kubenswrapper[4791]: I1208 21:36:30.460557 4791 generic.go:334] "Generic (PLEG): container finished" podID="ccbca2a1-3575-4209-bf34-d3812ad3bc1a" containerID="cd941beeab422e103cc8453a602f7b61901f8b0a7c73a863c6664b56d5359258" exitCode=0 Dec 08 21:36:30 crc kubenswrapper[4791]: I1208 21:36:30.460760 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8372w4n" event={"ID":"ccbca2a1-3575-4209-bf34-d3812ad3bc1a","Type":"ContainerDied","Data":"cd941beeab422e103cc8453a602f7b61901f8b0a7c73a863c6664b56d5359258"} Dec 08 21:36:31 crc kubenswrapper[4791]: I1208 21:36:31.475501 4791 generic.go:334] "Generic (PLEG): container finished" podID="ccbca2a1-3575-4209-bf34-d3812ad3bc1a" containerID="c4b425e2f5a28c6a45ccba7079743c0fe165a639b266be4f466a56c556955328" exitCode=0 Dec 08 21:36:31 crc kubenswrapper[4791]: I1208 21:36:31.475612 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8372w4n" event={"ID":"ccbca2a1-3575-4209-bf34-d3812ad3bc1a","Type":"ContainerDied","Data":"c4b425e2f5a28c6a45ccba7079743c0fe165a639b266be4f466a56c556955328"} Dec 08 21:36:32 crc kubenswrapper[4791]: I1208 21:36:32.764402 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8372w4n" Dec 08 21:36:32 crc kubenswrapper[4791]: I1208 21:36:32.878159 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ccbca2a1-3575-4209-bf34-d3812ad3bc1a-bundle\") pod \"ccbca2a1-3575-4209-bf34-d3812ad3bc1a\" (UID: \"ccbca2a1-3575-4209-bf34-d3812ad3bc1a\") " Dec 08 21:36:32 crc kubenswrapper[4791]: I1208 21:36:32.878386 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf8hc\" (UniqueName: \"kubernetes.io/projected/ccbca2a1-3575-4209-bf34-d3812ad3bc1a-kube-api-access-bf8hc\") pod \"ccbca2a1-3575-4209-bf34-d3812ad3bc1a\" (UID: \"ccbca2a1-3575-4209-bf34-d3812ad3bc1a\") " Dec 08 21:36:32 crc kubenswrapper[4791]: I1208 21:36:32.878468 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ccbca2a1-3575-4209-bf34-d3812ad3bc1a-util\") pod \"ccbca2a1-3575-4209-bf34-d3812ad3bc1a\" (UID: \"ccbca2a1-3575-4209-bf34-d3812ad3bc1a\") " Dec 08 21:36:32 crc kubenswrapper[4791]: I1208 21:36:32.879513 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ccbca2a1-3575-4209-bf34-d3812ad3bc1a-bundle" (OuterVolumeSpecName: "bundle") pod "ccbca2a1-3575-4209-bf34-d3812ad3bc1a" (UID: "ccbca2a1-3575-4209-bf34-d3812ad3bc1a"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:36:32 crc kubenswrapper[4791]: I1208 21:36:32.886775 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ccbca2a1-3575-4209-bf34-d3812ad3bc1a-kube-api-access-bf8hc" (OuterVolumeSpecName: "kube-api-access-bf8hc") pod "ccbca2a1-3575-4209-bf34-d3812ad3bc1a" (UID: "ccbca2a1-3575-4209-bf34-d3812ad3bc1a"). InnerVolumeSpecName "kube-api-access-bf8hc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:36:32 crc kubenswrapper[4791]: I1208 21:36:32.889667 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ccbca2a1-3575-4209-bf34-d3812ad3bc1a-util" (OuterVolumeSpecName: "util") pod "ccbca2a1-3575-4209-bf34-d3812ad3bc1a" (UID: "ccbca2a1-3575-4209-bf34-d3812ad3bc1a"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:36:32 crc kubenswrapper[4791]: I1208 21:36:32.980839 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf8hc\" (UniqueName: \"kubernetes.io/projected/ccbca2a1-3575-4209-bf34-d3812ad3bc1a-kube-api-access-bf8hc\") on node \"crc\" DevicePath \"\"" Dec 08 21:36:32 crc kubenswrapper[4791]: I1208 21:36:32.980889 4791 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ccbca2a1-3575-4209-bf34-d3812ad3bc1a-util\") on node \"crc\" DevicePath \"\"" Dec 08 21:36:32 crc kubenswrapper[4791]: I1208 21:36:32.980908 4791 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ccbca2a1-3575-4209-bf34-d3812ad3bc1a-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:36:33 crc kubenswrapper[4791]: I1208 21:36:33.492546 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8372w4n" event={"ID":"ccbca2a1-3575-4209-bf34-d3812ad3bc1a","Type":"ContainerDied","Data":"24d310827f9119ef0069d2425dc5469b66c86f64fdcedadcc9098def7166c939"} Dec 08 21:36:33 crc kubenswrapper[4791]: I1208 21:36:33.492588 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="24d310827f9119ef0069d2425dc5469b66c86f64fdcedadcc9098def7166c939" Dec 08 21:36:33 crc kubenswrapper[4791]: I1208 21:36:33.492617 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8372w4n" Dec 08 21:36:46 crc kubenswrapper[4791]: I1208 21:36:46.065226 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-7c8c9ff997-ltwq7"] Dec 08 21:36:46 crc kubenswrapper[4791]: E1208 21:36:46.066052 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccbca2a1-3575-4209-bf34-d3812ad3bc1a" containerName="extract" Dec 08 21:36:46 crc kubenswrapper[4791]: I1208 21:36:46.066065 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccbca2a1-3575-4209-bf34-d3812ad3bc1a" containerName="extract" Dec 08 21:36:46 crc kubenswrapper[4791]: E1208 21:36:46.066088 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccbca2a1-3575-4209-bf34-d3812ad3bc1a" containerName="util" Dec 08 21:36:46 crc kubenswrapper[4791]: I1208 21:36:46.066095 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccbca2a1-3575-4209-bf34-d3812ad3bc1a" containerName="util" Dec 08 21:36:46 crc kubenswrapper[4791]: E1208 21:36:46.066104 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccbca2a1-3575-4209-bf34-d3812ad3bc1a" containerName="pull" Dec 08 21:36:46 crc kubenswrapper[4791]: I1208 21:36:46.066109 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccbca2a1-3575-4209-bf34-d3812ad3bc1a" containerName="pull" Dec 08 21:36:46 crc kubenswrapper[4791]: E1208 21:36:46.066121 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa" containerName="console" Dec 08 21:36:46 crc kubenswrapper[4791]: I1208 21:36:46.066127 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa" containerName="console" Dec 08 21:36:46 crc kubenswrapper[4791]: I1208 21:36:46.066255 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="23f9c9e4-62e5-4f8a-8a0f-1e444a2ff4fa" containerName="console" Dec 
08 21:36:46 crc kubenswrapper[4791]: I1208 21:36:46.066268 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="ccbca2a1-3575-4209-bf34-d3812ad3bc1a" containerName="extract" Dec 08 21:36:46 crc kubenswrapper[4791]: I1208 21:36:46.066821 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-7c8c9ff997-ltwq7" Dec 08 21:36:46 crc kubenswrapper[4791]: I1208 21:36:46.069173 4791 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Dec 08 21:36:46 crc kubenswrapper[4791]: I1208 21:36:46.069651 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Dec 08 21:36:46 crc kubenswrapper[4791]: I1208 21:36:46.069846 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Dec 08 21:36:46 crc kubenswrapper[4791]: I1208 21:36:46.069987 4791 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-ljsmh" Dec 08 21:36:46 crc kubenswrapper[4791]: I1208 21:36:46.070755 4791 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Dec 08 21:36:46 crc kubenswrapper[4791]: I1208 21:36:46.159138 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-7c8c9ff997-ltwq7"] Dec 08 21:36:46 crc kubenswrapper[4791]: I1208 21:36:46.192447 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7e0400bf-de7f-4e28-bcd5-4602c8fe0724-apiservice-cert\") pod \"metallb-operator-controller-manager-7c8c9ff997-ltwq7\" (UID: \"7e0400bf-de7f-4e28-bcd5-4602c8fe0724\") " pod="metallb-system/metallb-operator-controller-manager-7c8c9ff997-ltwq7" Dec 08 21:36:46 crc kubenswrapper[4791]: I1208 21:36:46.192508 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8vmbd\" (UniqueName: \"kubernetes.io/projected/7e0400bf-de7f-4e28-bcd5-4602c8fe0724-kube-api-access-8vmbd\") pod \"metallb-operator-controller-manager-7c8c9ff997-ltwq7\" (UID: \"7e0400bf-de7f-4e28-bcd5-4602c8fe0724\") " pod="metallb-system/metallb-operator-controller-manager-7c8c9ff997-ltwq7" Dec 08 21:36:46 crc kubenswrapper[4791]: I1208 21:36:46.192637 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7e0400bf-de7f-4e28-bcd5-4602c8fe0724-webhook-cert\") pod \"metallb-operator-controller-manager-7c8c9ff997-ltwq7\" (UID: \"7e0400bf-de7f-4e28-bcd5-4602c8fe0724\") " pod="metallb-system/metallb-operator-controller-manager-7c8c9ff997-ltwq7" Dec 08 21:36:46 crc kubenswrapper[4791]: I1208 21:36:46.294498 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7e0400bf-de7f-4e28-bcd5-4602c8fe0724-apiservice-cert\") pod \"metallb-operator-controller-manager-7c8c9ff997-ltwq7\" (UID: \"7e0400bf-de7f-4e28-bcd5-4602c8fe0724\") " pod="metallb-system/metallb-operator-controller-manager-7c8c9ff997-ltwq7" Dec 08 21:36:46 crc kubenswrapper[4791]: I1208 21:36:46.294555 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8vmbd\" (UniqueName: 
\"kubernetes.io/projected/7e0400bf-de7f-4e28-bcd5-4602c8fe0724-kube-api-access-8vmbd\") pod \"metallb-operator-controller-manager-7c8c9ff997-ltwq7\" (UID: \"7e0400bf-de7f-4e28-bcd5-4602c8fe0724\") " pod="metallb-system/metallb-operator-controller-manager-7c8c9ff997-ltwq7" Dec 08 21:36:46 crc kubenswrapper[4791]: I1208 21:36:46.294730 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7e0400bf-de7f-4e28-bcd5-4602c8fe0724-webhook-cert\") pod \"metallb-operator-controller-manager-7c8c9ff997-ltwq7\" (UID: \"7e0400bf-de7f-4e28-bcd5-4602c8fe0724\") " pod="metallb-system/metallb-operator-controller-manager-7c8c9ff997-ltwq7" Dec 08 21:36:46 crc kubenswrapper[4791]: I1208 21:36:46.304204 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7e0400bf-de7f-4e28-bcd5-4602c8fe0724-apiservice-cert\") pod \"metallb-operator-controller-manager-7c8c9ff997-ltwq7\" (UID: \"7e0400bf-de7f-4e28-bcd5-4602c8fe0724\") " pod="metallb-system/metallb-operator-controller-manager-7c8c9ff997-ltwq7" Dec 08 21:36:46 crc kubenswrapper[4791]: I1208 21:36:46.313760 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7e0400bf-de7f-4e28-bcd5-4602c8fe0724-webhook-cert\") pod \"metallb-operator-controller-manager-7c8c9ff997-ltwq7\" (UID: \"7e0400bf-de7f-4e28-bcd5-4602c8fe0724\") " pod="metallb-system/metallb-operator-controller-manager-7c8c9ff997-ltwq7" Dec 08 21:36:46 crc kubenswrapper[4791]: I1208 21:36:46.320356 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8vmbd\" (UniqueName: \"kubernetes.io/projected/7e0400bf-de7f-4e28-bcd5-4602c8fe0724-kube-api-access-8vmbd\") pod \"metallb-operator-controller-manager-7c8c9ff997-ltwq7\" (UID: \"7e0400bf-de7f-4e28-bcd5-4602c8fe0724\") " pod="metallb-system/metallb-operator-controller-manager-7c8c9ff997-ltwq7" Dec 08 21:36:46 crc kubenswrapper[4791]: I1208 21:36:46.387147 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-7c8c9ff997-ltwq7" Dec 08 21:36:46 crc kubenswrapper[4791]: I1208 21:36:46.472398 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-5998c6b5bd-7wllz"] Dec 08 21:36:46 crc kubenswrapper[4791]: I1208 21:36:46.474064 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-5998c6b5bd-7wllz" Dec 08 21:36:46 crc kubenswrapper[4791]: I1208 21:36:46.481263 4791 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Dec 08 21:36:46 crc kubenswrapper[4791]: I1208 21:36:46.481580 4791 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-nb29j" Dec 08 21:36:46 crc kubenswrapper[4791]: I1208 21:36:46.481594 4791 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 08 21:36:46 crc kubenswrapper[4791]: I1208 21:36:46.502797 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-5998c6b5bd-7wllz"] Dec 08 21:36:46 crc kubenswrapper[4791]: I1208 21:36:46.601404 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f684s\" (UniqueName: \"kubernetes.io/projected/c496942f-e4dd-42ac-b2ce-d6deb5c58e4e-kube-api-access-f684s\") pod \"metallb-operator-webhook-server-5998c6b5bd-7wllz\" (UID: \"c496942f-e4dd-42ac-b2ce-d6deb5c58e4e\") " pod="metallb-system/metallb-operator-webhook-server-5998c6b5bd-7wllz" Dec 08 21:36:46 crc kubenswrapper[4791]: I1208 21:36:46.601477 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c496942f-e4dd-42ac-b2ce-d6deb5c58e4e-apiservice-cert\") pod \"metallb-operator-webhook-server-5998c6b5bd-7wllz\" (UID: \"c496942f-e4dd-42ac-b2ce-d6deb5c58e4e\") " pod="metallb-system/metallb-operator-webhook-server-5998c6b5bd-7wllz" Dec 08 21:36:46 crc kubenswrapper[4791]: I1208 21:36:46.601511 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c496942f-e4dd-42ac-b2ce-d6deb5c58e4e-webhook-cert\") pod \"metallb-operator-webhook-server-5998c6b5bd-7wllz\" (UID: \"c496942f-e4dd-42ac-b2ce-d6deb5c58e4e\") " pod="metallb-system/metallb-operator-webhook-server-5998c6b5bd-7wllz" Dec 08 21:36:46 crc kubenswrapper[4791]: I1208 21:36:46.703014 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c496942f-e4dd-42ac-b2ce-d6deb5c58e4e-apiservice-cert\") pod \"metallb-operator-webhook-server-5998c6b5bd-7wllz\" (UID: \"c496942f-e4dd-42ac-b2ce-d6deb5c58e4e\") " pod="metallb-system/metallb-operator-webhook-server-5998c6b5bd-7wllz" Dec 08 21:36:46 crc kubenswrapper[4791]: I1208 21:36:46.703063 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c496942f-e4dd-42ac-b2ce-d6deb5c58e4e-webhook-cert\") pod \"metallb-operator-webhook-server-5998c6b5bd-7wllz\" (UID: \"c496942f-e4dd-42ac-b2ce-d6deb5c58e4e\") " pod="metallb-system/metallb-operator-webhook-server-5998c6b5bd-7wllz" Dec 08 21:36:46 crc kubenswrapper[4791]: I1208 21:36:46.703152 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f684s\" (UniqueName: \"kubernetes.io/projected/c496942f-e4dd-42ac-b2ce-d6deb5c58e4e-kube-api-access-f684s\") pod \"metallb-operator-webhook-server-5998c6b5bd-7wllz\" (UID: \"c496942f-e4dd-42ac-b2ce-d6deb5c58e4e\") " pod="metallb-system/metallb-operator-webhook-server-5998c6b5bd-7wllz" Dec 08 21:36:46 crc kubenswrapper[4791]: I1208 
21:36:46.708088 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c496942f-e4dd-42ac-b2ce-d6deb5c58e4e-webhook-cert\") pod \"metallb-operator-webhook-server-5998c6b5bd-7wllz\" (UID: \"c496942f-e4dd-42ac-b2ce-d6deb5c58e4e\") " pod="metallb-system/metallb-operator-webhook-server-5998c6b5bd-7wllz" Dec 08 21:36:46 crc kubenswrapper[4791]: I1208 21:36:46.709108 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c496942f-e4dd-42ac-b2ce-d6deb5c58e4e-apiservice-cert\") pod \"metallb-operator-webhook-server-5998c6b5bd-7wllz\" (UID: \"c496942f-e4dd-42ac-b2ce-d6deb5c58e4e\") " pod="metallb-system/metallb-operator-webhook-server-5998c6b5bd-7wllz" Dec 08 21:36:46 crc kubenswrapper[4791]: I1208 21:36:46.719881 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f684s\" (UniqueName: \"kubernetes.io/projected/c496942f-e4dd-42ac-b2ce-d6deb5c58e4e-kube-api-access-f684s\") pod \"metallb-operator-webhook-server-5998c6b5bd-7wllz\" (UID: \"c496942f-e4dd-42ac-b2ce-d6deb5c58e4e\") " pod="metallb-system/metallb-operator-webhook-server-5998c6b5bd-7wllz" Dec 08 21:36:46 crc kubenswrapper[4791]: I1208 21:36:46.802413 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-5998c6b5bd-7wllz" Dec 08 21:36:46 crc kubenswrapper[4791]: I1208 21:36:46.975901 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-7c8c9ff997-ltwq7"] Dec 08 21:36:47 crc kubenswrapper[4791]: I1208 21:36:47.289971 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-5998c6b5bd-7wllz"] Dec 08 21:36:47 crc kubenswrapper[4791]: I1208 21:36:47.605633 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-7c8c9ff997-ltwq7" event={"ID":"7e0400bf-de7f-4e28-bcd5-4602c8fe0724","Type":"ContainerStarted","Data":"344cfa8020eef35b6f4839dd0b62d095b2ec392864b46fcb65efc373348fd3c6"} Dec 08 21:36:47 crc kubenswrapper[4791]: I1208 21:36:47.605668 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-5998c6b5bd-7wllz" event={"ID":"c496942f-e4dd-42ac-b2ce-d6deb5c58e4e","Type":"ContainerStarted","Data":"8a1adcf4a4e4f0111f60c587545d18680d87aed5baf3e8ada6f86b378886d5c6"} Dec 08 21:36:52 crc kubenswrapper[4791]: I1208 21:36:52.653282 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-7c8c9ff997-ltwq7" event={"ID":"7e0400bf-de7f-4e28-bcd5-4602c8fe0724","Type":"ContainerStarted","Data":"fd66613e455b2d466e02b859ee0defc8e6ae322ebdfa4ecc8b67e1b4c66ebc8e"} Dec 08 21:36:52 crc kubenswrapper[4791]: I1208 21:36:52.654700 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-7c8c9ff997-ltwq7" Dec 08 21:36:52 crc kubenswrapper[4791]: I1208 21:36:52.659977 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-5998c6b5bd-7wllz" event={"ID":"c496942f-e4dd-42ac-b2ce-d6deb5c58e4e","Type":"ContainerStarted","Data":"51ed62ee0b281e824e1bf8be06cfa1a135af7a7629e129dcdf51336aa62f53d4"} Dec 08 21:36:52 crc kubenswrapper[4791]: I1208 21:36:52.661124 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="metallb-system/metallb-operator-webhook-server-5998c6b5bd-7wllz" Dec 08 21:36:52 crc kubenswrapper[4791]: I1208 21:36:52.677310 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-7c8c9ff997-ltwq7" podStartSLOduration=1.5137447229999998 podStartE2EDuration="6.677292171s" podCreationTimestamp="2025-12-08 21:36:46 +0000 UTC" firstStartedPulling="2025-12-08 21:36:47.011548746 +0000 UTC m=+1083.710307091" lastFinishedPulling="2025-12-08 21:36:52.175096194 +0000 UTC m=+1088.873854539" observedRunningTime="2025-12-08 21:36:52.676077611 +0000 UTC m=+1089.374835956" watchObservedRunningTime="2025-12-08 21:36:52.677292171 +0000 UTC m=+1089.376050516" Dec 08 21:36:52 crc kubenswrapper[4791]: I1208 21:36:52.703215 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-5998c6b5bd-7wllz" podStartSLOduration=1.8049496600000001 podStartE2EDuration="6.703199083s" podCreationTimestamp="2025-12-08 21:36:46 +0000 UTC" firstStartedPulling="2025-12-08 21:36:47.294047238 +0000 UTC m=+1083.992805583" lastFinishedPulling="2025-12-08 21:36:52.192296661 +0000 UTC m=+1088.891055006" observedRunningTime="2025-12-08 21:36:52.698886227 +0000 UTC m=+1089.397644602" watchObservedRunningTime="2025-12-08 21:36:52.703199083 +0000 UTC m=+1089.401957428" Dec 08 21:37:06 crc kubenswrapper[4791]: I1208 21:37:06.810045 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-5998c6b5bd-7wllz" Dec 08 21:37:26 crc kubenswrapper[4791]: I1208 21:37:26.390317 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-7c8c9ff997-ltwq7" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.269175 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-2725l"] Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.273728 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-2725l" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.277012 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.277051 4791 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.277023 4791 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-ndkpm" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.297803 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-kf5ks"] Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.299148 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-kf5ks" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.302397 4791 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.322788 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-kf5ks"] Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.382645 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e9d59a9f-d637-407a-a0f9-ae4f4e765b14-metrics-certs\") pod \"frr-k8s-2725l\" (UID: \"e9d59a9f-d637-407a-a0f9-ae4f4e765b14\") " pod="metallb-system/frr-k8s-2725l" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.382691 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/e9d59a9f-d637-407a-a0f9-ae4f4e765b14-frr-conf\") pod \"frr-k8s-2725l\" (UID: \"e9d59a9f-d637-407a-a0f9-ae4f4e765b14\") " pod="metallb-system/frr-k8s-2725l" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.382741 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9q64\" (UniqueName: \"kubernetes.io/projected/e9d59a9f-d637-407a-a0f9-ae4f4e765b14-kube-api-access-z9q64\") pod \"frr-k8s-2725l\" (UID: \"e9d59a9f-d637-407a-a0f9-ae4f4e765b14\") " pod="metallb-system/frr-k8s-2725l" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.382763 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/e9d59a9f-d637-407a-a0f9-ae4f4e765b14-metrics\") pod \"frr-k8s-2725l\" (UID: \"e9d59a9f-d637-407a-a0f9-ae4f4e765b14\") " pod="metallb-system/frr-k8s-2725l" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.382794 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/e9d59a9f-d637-407a-a0f9-ae4f4e765b14-frr-startup\") pod \"frr-k8s-2725l\" (UID: \"e9d59a9f-d637-407a-a0f9-ae4f4e765b14\") " pod="metallb-system/frr-k8s-2725l" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.382918 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/e9d59a9f-d637-407a-a0f9-ae4f4e765b14-reloader\") pod \"frr-k8s-2725l\" (UID: \"e9d59a9f-d637-407a-a0f9-ae4f4e765b14\") " pod="metallb-system/frr-k8s-2725l" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.382959 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ftl7p\" (UniqueName: \"kubernetes.io/projected/e25fb151-fa5d-4cbe-804c-6078095f6d70-kube-api-access-ftl7p\") pod \"frr-k8s-webhook-server-7fcb986d4-kf5ks\" (UID: \"e25fb151-fa5d-4cbe-804c-6078095f6d70\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-kf5ks" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.383035 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e25fb151-fa5d-4cbe-804c-6078095f6d70-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-kf5ks\" (UID: \"e25fb151-fa5d-4cbe-804c-6078095f6d70\") " 
pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-kf5ks" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.383066 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/e9d59a9f-d637-407a-a0f9-ae4f4e765b14-frr-sockets\") pod \"frr-k8s-2725l\" (UID: \"e9d59a9f-d637-407a-a0f9-ae4f4e765b14\") " pod="metallb-system/frr-k8s-2725l" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.388799 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-7zhfz"] Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.398080 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-7zhfz" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.409946 4791 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.412789 4791 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.412946 4791 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-g7dr4" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.413424 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.414936 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-f8648f98b-wbgwq"] Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.416406 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-f8648f98b-wbgwq" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.420702 4791 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.425786 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-wbgwq"] Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.484201 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/e9d59a9f-d637-407a-a0f9-ae4f4e765b14-frr-startup\") pod \"frr-k8s-2725l\" (UID: \"e9d59a9f-d637-407a-a0f9-ae4f4e765b14\") " pod="metallb-system/frr-k8s-2725l" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.484254 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/e9d59a9f-d637-407a-a0f9-ae4f4e765b14-reloader\") pod \"frr-k8s-2725l\" (UID: \"e9d59a9f-d637-407a-a0f9-ae4f4e765b14\") " pod="metallb-system/frr-k8s-2725l" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.484282 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ftl7p\" (UniqueName: \"kubernetes.io/projected/e25fb151-fa5d-4cbe-804c-6078095f6d70-kube-api-access-ftl7p\") pod \"frr-k8s-webhook-server-7fcb986d4-kf5ks\" (UID: \"e25fb151-fa5d-4cbe-804c-6078095f6d70\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-kf5ks" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.484305 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ffeac0c0-7221-4e0b-a48f-c457875da8f7-metrics-certs\") pod 
\"controller-f8648f98b-wbgwq\" (UID: \"ffeac0c0-7221-4e0b-a48f-c457875da8f7\") " pod="metallb-system/controller-f8648f98b-wbgwq" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.484322 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-446dp\" (UniqueName: \"kubernetes.io/projected/ffeac0c0-7221-4e0b-a48f-c457875da8f7-kube-api-access-446dp\") pod \"controller-f8648f98b-wbgwq\" (UID: \"ffeac0c0-7221-4e0b-a48f-c457875da8f7\") " pod="metallb-system/controller-f8648f98b-wbgwq" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.484359 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6cf2cc89-0735-40c9-bdee-13e18de0a9ea-metrics-certs\") pod \"speaker-7zhfz\" (UID: \"6cf2cc89-0735-40c9-bdee-13e18de0a9ea\") " pod="metallb-system/speaker-7zhfz" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.484377 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e25fb151-fa5d-4cbe-804c-6078095f6d70-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-kf5ks\" (UID: \"e25fb151-fa5d-4cbe-804c-6078095f6d70\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-kf5ks" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.484396 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/6cf2cc89-0735-40c9-bdee-13e18de0a9ea-metallb-excludel2\") pod \"speaker-7zhfz\" (UID: \"6cf2cc89-0735-40c9-bdee-13e18de0a9ea\") " pod="metallb-system/speaker-7zhfz" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.484411 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/e9d59a9f-d637-407a-a0f9-ae4f4e765b14-frr-sockets\") pod \"frr-k8s-2725l\" (UID: \"e9d59a9f-d637-407a-a0f9-ae4f4e765b14\") " pod="metallb-system/frr-k8s-2725l" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.484444 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/6cf2cc89-0735-40c9-bdee-13e18de0a9ea-memberlist\") pod \"speaker-7zhfz\" (UID: \"6cf2cc89-0735-40c9-bdee-13e18de0a9ea\") " pod="metallb-system/speaker-7zhfz" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.484465 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ffeac0c0-7221-4e0b-a48f-c457875da8f7-cert\") pod \"controller-f8648f98b-wbgwq\" (UID: \"ffeac0c0-7221-4e0b-a48f-c457875da8f7\") " pod="metallb-system/controller-f8648f98b-wbgwq" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.484517 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rb59r\" (UniqueName: \"kubernetes.io/projected/6cf2cc89-0735-40c9-bdee-13e18de0a9ea-kube-api-access-rb59r\") pod \"speaker-7zhfz\" (UID: \"6cf2cc89-0735-40c9-bdee-13e18de0a9ea\") " pod="metallb-system/speaker-7zhfz" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.484546 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e9d59a9f-d637-407a-a0f9-ae4f4e765b14-metrics-certs\") pod \"frr-k8s-2725l\" (UID: \"e9d59a9f-d637-407a-a0f9-ae4f4e765b14\") " 
pod="metallb-system/frr-k8s-2725l" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.484561 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/e9d59a9f-d637-407a-a0f9-ae4f4e765b14-frr-conf\") pod \"frr-k8s-2725l\" (UID: \"e9d59a9f-d637-407a-a0f9-ae4f4e765b14\") " pod="metallb-system/frr-k8s-2725l" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.484577 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9q64\" (UniqueName: \"kubernetes.io/projected/e9d59a9f-d637-407a-a0f9-ae4f4e765b14-kube-api-access-z9q64\") pod \"frr-k8s-2725l\" (UID: \"e9d59a9f-d637-407a-a0f9-ae4f4e765b14\") " pod="metallb-system/frr-k8s-2725l" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.484590 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/e9d59a9f-d637-407a-a0f9-ae4f4e765b14-metrics\") pod \"frr-k8s-2725l\" (UID: \"e9d59a9f-d637-407a-a0f9-ae4f4e765b14\") " pod="metallb-system/frr-k8s-2725l" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.485001 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/e9d59a9f-d637-407a-a0f9-ae4f4e765b14-metrics\") pod \"frr-k8s-2725l\" (UID: \"e9d59a9f-d637-407a-a0f9-ae4f4e765b14\") " pod="metallb-system/frr-k8s-2725l" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.485691 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/e9d59a9f-d637-407a-a0f9-ae4f4e765b14-frr-startup\") pod \"frr-k8s-2725l\" (UID: \"e9d59a9f-d637-407a-a0f9-ae4f4e765b14\") " pod="metallb-system/frr-k8s-2725l" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.485889 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/e9d59a9f-d637-407a-a0f9-ae4f4e765b14-reloader\") pod \"frr-k8s-2725l\" (UID: \"e9d59a9f-d637-407a-a0f9-ae4f4e765b14\") " pod="metallb-system/frr-k8s-2725l" Dec 08 21:37:27 crc kubenswrapper[4791]: E1208 21:37:27.486921 4791 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found Dec 08 21:37:27 crc kubenswrapper[4791]: E1208 21:37:27.486970 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e9d59a9f-d637-407a-a0f9-ae4f4e765b14-metrics-certs podName:e9d59a9f-d637-407a-a0f9-ae4f4e765b14 nodeName:}" failed. No retries permitted until 2025-12-08 21:37:27.986956068 +0000 UTC m=+1124.685714413 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/e9d59a9f-d637-407a-a0f9-ae4f4e765b14-metrics-certs") pod "frr-k8s-2725l" (UID: "e9d59a9f-d637-407a-a0f9-ae4f4e765b14") : secret "frr-k8s-certs-secret" not found Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.487256 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/e9d59a9f-d637-407a-a0f9-ae4f4e765b14-frr-conf\") pod \"frr-k8s-2725l\" (UID: \"e9d59a9f-d637-407a-a0f9-ae4f4e765b14\") " pod="metallb-system/frr-k8s-2725l" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.488309 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/e9d59a9f-d637-407a-a0f9-ae4f4e765b14-frr-sockets\") pod \"frr-k8s-2725l\" (UID: \"e9d59a9f-d637-407a-a0f9-ae4f4e765b14\") " pod="metallb-system/frr-k8s-2725l" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.491211 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e25fb151-fa5d-4cbe-804c-6078095f6d70-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-kf5ks\" (UID: \"e25fb151-fa5d-4cbe-804c-6078095f6d70\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-kf5ks" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.508249 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9q64\" (UniqueName: \"kubernetes.io/projected/e9d59a9f-d637-407a-a0f9-ae4f4e765b14-kube-api-access-z9q64\") pod \"frr-k8s-2725l\" (UID: \"e9d59a9f-d637-407a-a0f9-ae4f4e765b14\") " pod="metallb-system/frr-k8s-2725l" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.512415 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ftl7p\" (UniqueName: \"kubernetes.io/projected/e25fb151-fa5d-4cbe-804c-6078095f6d70-kube-api-access-ftl7p\") pod \"frr-k8s-webhook-server-7fcb986d4-kf5ks\" (UID: \"e25fb151-fa5d-4cbe-804c-6078095f6d70\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-kf5ks" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.586613 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/6cf2cc89-0735-40c9-bdee-13e18de0a9ea-memberlist\") pod \"speaker-7zhfz\" (UID: \"6cf2cc89-0735-40c9-bdee-13e18de0a9ea\") " pod="metallb-system/speaker-7zhfz" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.586734 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ffeac0c0-7221-4e0b-a48f-c457875da8f7-cert\") pod \"controller-f8648f98b-wbgwq\" (UID: \"ffeac0c0-7221-4e0b-a48f-c457875da8f7\") " pod="metallb-system/controller-f8648f98b-wbgwq" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.586812 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rb59r\" (UniqueName: \"kubernetes.io/projected/6cf2cc89-0735-40c9-bdee-13e18de0a9ea-kube-api-access-rb59r\") pod \"speaker-7zhfz\" (UID: \"6cf2cc89-0735-40c9-bdee-13e18de0a9ea\") " pod="metallb-system/speaker-7zhfz" Dec 08 21:37:27 crc kubenswrapper[4791]: E1208 21:37:27.586816 4791 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 08 21:37:27 crc kubenswrapper[4791]: E1208 21:37:27.586913 4791 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/secret/6cf2cc89-0735-40c9-bdee-13e18de0a9ea-memberlist podName:6cf2cc89-0735-40c9-bdee-13e18de0a9ea nodeName:}" failed. No retries permitted until 2025-12-08 21:37:28.086890261 +0000 UTC m=+1124.785648666 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/6cf2cc89-0735-40c9-bdee-13e18de0a9ea-memberlist") pod "speaker-7zhfz" (UID: "6cf2cc89-0735-40c9-bdee-13e18de0a9ea") : secret "metallb-memberlist" not found Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.586930 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ffeac0c0-7221-4e0b-a48f-c457875da8f7-metrics-certs\") pod \"controller-f8648f98b-wbgwq\" (UID: \"ffeac0c0-7221-4e0b-a48f-c457875da8f7\") " pod="metallb-system/controller-f8648f98b-wbgwq" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.586956 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-446dp\" (UniqueName: \"kubernetes.io/projected/ffeac0c0-7221-4e0b-a48f-c457875da8f7-kube-api-access-446dp\") pod \"controller-f8648f98b-wbgwq\" (UID: \"ffeac0c0-7221-4e0b-a48f-c457875da8f7\") " pod="metallb-system/controller-f8648f98b-wbgwq" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.586997 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6cf2cc89-0735-40c9-bdee-13e18de0a9ea-metrics-certs\") pod \"speaker-7zhfz\" (UID: \"6cf2cc89-0735-40c9-bdee-13e18de0a9ea\") " pod="metallb-system/speaker-7zhfz" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.587023 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/6cf2cc89-0735-40c9-bdee-13e18de0a9ea-metallb-excludel2\") pod \"speaker-7zhfz\" (UID: \"6cf2cc89-0735-40c9-bdee-13e18de0a9ea\") " pod="metallb-system/speaker-7zhfz" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.588021 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/6cf2cc89-0735-40c9-bdee-13e18de0a9ea-metallb-excludel2\") pod \"speaker-7zhfz\" (UID: \"6cf2cc89-0735-40c9-bdee-13e18de0a9ea\") " pod="metallb-system/speaker-7zhfz" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.590129 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ffeac0c0-7221-4e0b-a48f-c457875da8f7-cert\") pod \"controller-f8648f98b-wbgwq\" (UID: \"ffeac0c0-7221-4e0b-a48f-c457875da8f7\") " pod="metallb-system/controller-f8648f98b-wbgwq" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.594348 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6cf2cc89-0735-40c9-bdee-13e18de0a9ea-metrics-certs\") pod \"speaker-7zhfz\" (UID: \"6cf2cc89-0735-40c9-bdee-13e18de0a9ea\") " pod="metallb-system/speaker-7zhfz" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.604544 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ffeac0c0-7221-4e0b-a48f-c457875da8f7-metrics-certs\") pod \"controller-f8648f98b-wbgwq\" (UID: \"ffeac0c0-7221-4e0b-a48f-c457875da8f7\") " pod="metallb-system/controller-f8648f98b-wbgwq" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.610478 4791 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rb59r\" (UniqueName: \"kubernetes.io/projected/6cf2cc89-0735-40c9-bdee-13e18de0a9ea-kube-api-access-rb59r\") pod \"speaker-7zhfz\" (UID: \"6cf2cc89-0735-40c9-bdee-13e18de0a9ea\") " pod="metallb-system/speaker-7zhfz" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.618855 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-kf5ks" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.643106 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-446dp\" (UniqueName: \"kubernetes.io/projected/ffeac0c0-7221-4e0b-a48f-c457875da8f7-kube-api-access-446dp\") pod \"controller-f8648f98b-wbgwq\" (UID: \"ffeac0c0-7221-4e0b-a48f-c457875da8f7\") " pod="metallb-system/controller-f8648f98b-wbgwq" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.749065 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-f8648f98b-wbgwq" Dec 08 21:37:27 crc kubenswrapper[4791]: I1208 21:37:27.994599 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e9d59a9f-d637-407a-a0f9-ae4f4e765b14-metrics-certs\") pod \"frr-k8s-2725l\" (UID: \"e9d59a9f-d637-407a-a0f9-ae4f4e765b14\") " pod="metallb-system/frr-k8s-2725l" Dec 08 21:37:28 crc kubenswrapper[4791]: I1208 21:37:28.000338 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e9d59a9f-d637-407a-a0f9-ae4f4e765b14-metrics-certs\") pod \"frr-k8s-2725l\" (UID: \"e9d59a9f-d637-407a-a0f9-ae4f4e765b14\") " pod="metallb-system/frr-k8s-2725l" Dec 08 21:37:28 crc kubenswrapper[4791]: I1208 21:37:28.096371 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/6cf2cc89-0735-40c9-bdee-13e18de0a9ea-memberlist\") pod \"speaker-7zhfz\" (UID: \"6cf2cc89-0735-40c9-bdee-13e18de0a9ea\") " pod="metallb-system/speaker-7zhfz" Dec 08 21:37:28 crc kubenswrapper[4791]: E1208 21:37:28.096565 4791 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 08 21:37:28 crc kubenswrapper[4791]: E1208 21:37:28.096643 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6cf2cc89-0735-40c9-bdee-13e18de0a9ea-memberlist podName:6cf2cc89-0735-40c9-bdee-13e18de0a9ea nodeName:}" failed. No retries permitted until 2025-12-08 21:37:29.096626346 +0000 UTC m=+1125.795384691 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/6cf2cc89-0735-40c9-bdee-13e18de0a9ea-memberlist") pod "speaker-7zhfz" (UID: "6cf2cc89-0735-40c9-bdee-13e18de0a9ea") : secret "metallb-memberlist" not found Dec 08 21:37:28 crc kubenswrapper[4791]: I1208 21:37:28.169574 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-kf5ks"] Dec 08 21:37:28 crc kubenswrapper[4791]: I1208 21:37:28.194459 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-2725l" Dec 08 21:37:28 crc kubenswrapper[4791]: I1208 21:37:28.258354 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-wbgwq"] Dec 08 21:37:28 crc kubenswrapper[4791]: W1208 21:37:28.269661 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podffeac0c0_7221_4e0b_a48f_c457875da8f7.slice/crio-a8343c5ed3aade94b8d3d2448216fa12d717474b0bbba226d6fb7a89b52b317f WatchSource:0}: Error finding container a8343c5ed3aade94b8d3d2448216fa12d717474b0bbba226d6fb7a89b52b317f: Status 404 returned error can't find the container with id a8343c5ed3aade94b8d3d2448216fa12d717474b0bbba226d6fb7a89b52b317f Dec 08 21:37:28 crc kubenswrapper[4791]: I1208 21:37:28.921980 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-kf5ks" event={"ID":"e25fb151-fa5d-4cbe-804c-6078095f6d70","Type":"ContainerStarted","Data":"c82e8d2a73fa8abf67adf79584bc5ea765173fa9f90d7c2f0e59401ff0a9e5f2"} Dec 08 21:37:28 crc kubenswrapper[4791]: I1208 21:37:28.923499 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2725l" event={"ID":"e9d59a9f-d637-407a-a0f9-ae4f4e765b14","Type":"ContainerStarted","Data":"2bb1c38a27f2222140eb0a9616d789475a783e78223ff51d693cc97d2ff3306d"} Dec 08 21:37:28 crc kubenswrapper[4791]: I1208 21:37:28.925495 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-wbgwq" event={"ID":"ffeac0c0-7221-4e0b-a48f-c457875da8f7","Type":"ContainerStarted","Data":"bfcf7771e744f08ce29b2d944f0ab8e01afd78609daf3a60ca4c2340c4d0cf2a"} Dec 08 21:37:28 crc kubenswrapper[4791]: I1208 21:37:28.925532 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-wbgwq" event={"ID":"ffeac0c0-7221-4e0b-a48f-c457875da8f7","Type":"ContainerStarted","Data":"a8343c5ed3aade94b8d3d2448216fa12d717474b0bbba226d6fb7a89b52b317f"} Dec 08 21:37:29 crc kubenswrapper[4791]: I1208 21:37:29.114953 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/6cf2cc89-0735-40c9-bdee-13e18de0a9ea-memberlist\") pod \"speaker-7zhfz\" (UID: \"6cf2cc89-0735-40c9-bdee-13e18de0a9ea\") " pod="metallb-system/speaker-7zhfz" Dec 08 21:37:29 crc kubenswrapper[4791]: I1208 21:37:29.121459 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/6cf2cc89-0735-40c9-bdee-13e18de0a9ea-memberlist\") pod \"speaker-7zhfz\" (UID: \"6cf2cc89-0735-40c9-bdee-13e18de0a9ea\") " pod="metallb-system/speaker-7zhfz" Dec 08 21:37:29 crc kubenswrapper[4791]: I1208 21:37:29.241812 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-7zhfz" Dec 08 21:37:29 crc kubenswrapper[4791]: I1208 21:37:29.940611 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-wbgwq" event={"ID":"ffeac0c0-7221-4e0b-a48f-c457875da8f7","Type":"ContainerStarted","Data":"86a51756f1597c73f6cf1015ba8accec4823f8fde4eaecb46eb22aa94410946a"} Dec 08 21:37:29 crc kubenswrapper[4791]: I1208 21:37:29.943567 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-7zhfz" event={"ID":"6cf2cc89-0735-40c9-bdee-13e18de0a9ea","Type":"ContainerStarted","Data":"e4ef1d44c5b3729db8a29213a0c5b4af99f29fdb6be3deee4d7a52b71f4e4c2e"} Dec 08 21:37:29 crc kubenswrapper[4791]: I1208 21:37:29.943624 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-7zhfz" event={"ID":"6cf2cc89-0735-40c9-bdee-13e18de0a9ea","Type":"ContainerStarted","Data":"3af93954d13cca876b8a128eac08df02b3acd3b62a164a8bd244c28965b89640"} Dec 08 21:37:29 crc kubenswrapper[4791]: I1208 21:37:29.943637 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-7zhfz" event={"ID":"6cf2cc89-0735-40c9-bdee-13e18de0a9ea","Type":"ContainerStarted","Data":"5a71552b550d36ec89a08d8cd136a29db2b9d40fd67f6a9eaf0069cbd7065222"} Dec 08 21:37:29 crc kubenswrapper[4791]: I1208 21:37:29.943837 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-7zhfz" Dec 08 21:37:29 crc kubenswrapper[4791]: I1208 21:37:29.967076 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-f8648f98b-wbgwq" podStartSLOduration=2.967027096 podStartE2EDuration="2.967027096s" podCreationTimestamp="2025-12-08 21:37:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:37:29.960005791 +0000 UTC m=+1126.658764136" watchObservedRunningTime="2025-12-08 21:37:29.967027096 +0000 UTC m=+1126.665785441" Dec 08 21:37:29 crc kubenswrapper[4791]: I1208 21:37:29.978468 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-7zhfz" podStartSLOduration=2.978451841 podStartE2EDuration="2.978451841s" podCreationTimestamp="2025-12-08 21:37:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:37:29.976750459 +0000 UTC m=+1126.675508824" watchObservedRunningTime="2025-12-08 21:37:29.978451841 +0000 UTC m=+1126.677210186" Dec 08 21:37:30 crc kubenswrapper[4791]: I1208 21:37:30.954586 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-f8648f98b-wbgwq" Dec 08 21:37:35 crc kubenswrapper[4791]: I1208 21:37:35.253417 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:37:35 crc kubenswrapper[4791]: I1208 21:37:35.254008 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:37:36 crc 
kubenswrapper[4791]: I1208 21:37:36.018085 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-kf5ks" event={"ID":"e25fb151-fa5d-4cbe-804c-6078095f6d70","Type":"ContainerStarted","Data":"d6319d1bf562bfb93c1cccd124d9f325d56f930b65bda712e7c6b850475ec5a8"} Dec 08 21:37:36 crc kubenswrapper[4791]: I1208 21:37:36.018384 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-kf5ks" Dec 08 21:37:36 crc kubenswrapper[4791]: I1208 21:37:36.020541 4791 generic.go:334] "Generic (PLEG): container finished" podID="e9d59a9f-d637-407a-a0f9-ae4f4e765b14" containerID="aa74a873e1212367302de79f3fb919f26452733f746ee69cec40df3ace829d34" exitCode=0 Dec 08 21:37:36 crc kubenswrapper[4791]: I1208 21:37:36.020594 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2725l" event={"ID":"e9d59a9f-d637-407a-a0f9-ae4f4e765b14","Type":"ContainerDied","Data":"aa74a873e1212367302de79f3fb919f26452733f746ee69cec40df3ace829d34"} Dec 08 21:37:36 crc kubenswrapper[4791]: I1208 21:37:36.039282 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-kf5ks" podStartSLOduration=1.760263771 podStartE2EDuration="9.039262446s" podCreationTimestamp="2025-12-08 21:37:27 +0000 UTC" firstStartedPulling="2025-12-08 21:37:28.184157659 +0000 UTC m=+1124.882915994" lastFinishedPulling="2025-12-08 21:37:35.463156314 +0000 UTC m=+1132.161914669" observedRunningTime="2025-12-08 21:37:36.032917258 +0000 UTC m=+1132.731675603" watchObservedRunningTime="2025-12-08 21:37:36.039262446 +0000 UTC m=+1132.738020791" Dec 08 21:37:37 crc kubenswrapper[4791]: I1208 21:37:37.030353 4791 generic.go:334] "Generic (PLEG): container finished" podID="e9d59a9f-d637-407a-a0f9-ae4f4e765b14" containerID="d156ce4ba0c7de580037fbd794c0bf55053dd59e31eade56f31e75b6eda66fae" exitCode=0 Dec 08 21:37:37 crc kubenswrapper[4791]: I1208 21:37:37.031541 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2725l" event={"ID":"e9d59a9f-d637-407a-a0f9-ae4f4e765b14","Type":"ContainerDied","Data":"d156ce4ba0c7de580037fbd794c0bf55053dd59e31eade56f31e75b6eda66fae"} Dec 08 21:37:38 crc kubenswrapper[4791]: I1208 21:37:38.040345 4791 generic.go:334] "Generic (PLEG): container finished" podID="e9d59a9f-d637-407a-a0f9-ae4f4e765b14" containerID="a4d8abbdf63c3679a5f3fb3829232a45099a929a2121d233714c35ffd9bb0813" exitCode=0 Dec 08 21:37:38 crc kubenswrapper[4791]: I1208 21:37:38.040440 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2725l" event={"ID":"e9d59a9f-d637-407a-a0f9-ae4f4e765b14","Type":"ContainerDied","Data":"a4d8abbdf63c3679a5f3fb3829232a45099a929a2121d233714c35ffd9bb0813"} Dec 08 21:37:39 crc kubenswrapper[4791]: I1208 21:37:39.053909 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2725l" event={"ID":"e9d59a9f-d637-407a-a0f9-ae4f4e765b14","Type":"ContainerStarted","Data":"79ba657c2bedd4a2db2e5931881f5355c6d7eca8906724a2545e2aaa86f7db8b"} Dec 08 21:37:39 crc kubenswrapper[4791]: I1208 21:37:39.054484 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2725l" event={"ID":"e9d59a9f-d637-407a-a0f9-ae4f4e765b14","Type":"ContainerStarted","Data":"e859cf3a34790719f0b483e4cf3ab57affe380eccffcae212ec4e81dfb135e16"} Dec 08 21:37:39 crc kubenswrapper[4791]: I1208 21:37:39.054498 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="metallb-system/frr-k8s-2725l" event={"ID":"e9d59a9f-d637-407a-a0f9-ae4f4e765b14","Type":"ContainerStarted","Data":"afdbc3e06132cdd891fdc3cb0a2bac9eb86b65bd21e5d1deb15493184031c521"} Dec 08 21:37:39 crc kubenswrapper[4791]: I1208 21:37:39.054512 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2725l" event={"ID":"e9d59a9f-d637-407a-a0f9-ae4f4e765b14","Type":"ContainerStarted","Data":"2227e4f2a6630d87ff81f5a1fe59a143777dbc6864601117806d0f4aa171ad7e"} Dec 08 21:37:39 crc kubenswrapper[4791]: I1208 21:37:39.054526 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2725l" event={"ID":"e9d59a9f-d637-407a-a0f9-ae4f4e765b14","Type":"ContainerStarted","Data":"e80148595ef632ade0de1fc25a4920469b8fcc099d6f04bbdbd1530e63fa0aac"} Dec 08 21:37:39 crc kubenswrapper[4791]: I1208 21:37:39.246313 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-7zhfz" Dec 08 21:37:40 crc kubenswrapper[4791]: I1208 21:37:40.065921 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2725l" event={"ID":"e9d59a9f-d637-407a-a0f9-ae4f4e765b14","Type":"ContainerStarted","Data":"56c17a17156fe84677b04fff2ef9758db492a4e564c022ed7e308280107fd6f1"} Dec 08 21:37:40 crc kubenswrapper[4791]: I1208 21:37:40.066151 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-2725l" Dec 08 21:37:40 crc kubenswrapper[4791]: I1208 21:37:40.092629 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-2725l" podStartSLOduration=6.449295866 podStartE2EDuration="13.092604033s" podCreationTimestamp="2025-12-08 21:37:27 +0000 UTC" firstStartedPulling="2025-12-08 21:37:28.842357849 +0000 UTC m=+1125.541116204" lastFinishedPulling="2025-12-08 21:37:35.485666026 +0000 UTC m=+1132.184424371" observedRunningTime="2025-12-08 21:37:40.092436119 +0000 UTC m=+1136.791194464" watchObservedRunningTime="2025-12-08 21:37:40.092604033 +0000 UTC m=+1136.791362378" Dec 08 21:37:42 crc kubenswrapper[4791]: I1208 21:37:42.231487 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-qfcxg"] Dec 08 21:37:42 crc kubenswrapper[4791]: I1208 21:37:42.233005 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-qfcxg" Dec 08 21:37:42 crc kubenswrapper[4791]: I1208 21:37:42.239679 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-69c67" Dec 08 21:37:42 crc kubenswrapper[4791]: I1208 21:37:42.240939 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 08 21:37:42 crc kubenswrapper[4791]: I1208 21:37:42.244416 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 08 21:37:42 crc kubenswrapper[4791]: I1208 21:37:42.253571 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-qfcxg"] Dec 08 21:37:42 crc kubenswrapper[4791]: I1208 21:37:42.377958 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cs4rn\" (UniqueName: \"kubernetes.io/projected/78607043-6ad9-43b9-b368-f038ebc2461c-kube-api-access-cs4rn\") pod \"openstack-operator-index-qfcxg\" (UID: \"78607043-6ad9-43b9-b368-f038ebc2461c\") " pod="openstack-operators/openstack-operator-index-qfcxg" Dec 08 21:37:42 crc kubenswrapper[4791]: I1208 21:37:42.479832 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cs4rn\" (UniqueName: \"kubernetes.io/projected/78607043-6ad9-43b9-b368-f038ebc2461c-kube-api-access-cs4rn\") pod \"openstack-operator-index-qfcxg\" (UID: \"78607043-6ad9-43b9-b368-f038ebc2461c\") " pod="openstack-operators/openstack-operator-index-qfcxg" Dec 08 21:37:42 crc kubenswrapper[4791]: I1208 21:37:42.499310 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cs4rn\" (UniqueName: \"kubernetes.io/projected/78607043-6ad9-43b9-b368-f038ebc2461c-kube-api-access-cs4rn\") pod \"openstack-operator-index-qfcxg\" (UID: \"78607043-6ad9-43b9-b368-f038ebc2461c\") " pod="openstack-operators/openstack-operator-index-qfcxg" Dec 08 21:37:42 crc kubenswrapper[4791]: I1208 21:37:42.577836 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-qfcxg" Dec 08 21:37:43 crc kubenswrapper[4791]: I1208 21:37:43.069559 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-qfcxg"] Dec 08 21:37:43 crc kubenswrapper[4791]: W1208 21:37:43.077556 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod78607043_6ad9_43b9_b368_f038ebc2461c.slice/crio-779e380ecb89d198324d46a089100f223b064f3ee0ee24537a58174a060d88b5 WatchSource:0}: Error finding container 779e380ecb89d198324d46a089100f223b064f3ee0ee24537a58174a060d88b5: Status 404 returned error can't find the container with id 779e380ecb89d198324d46a089100f223b064f3ee0ee24537a58174a060d88b5 Dec 08 21:37:43 crc kubenswrapper[4791]: I1208 21:37:43.093567 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-qfcxg" event={"ID":"78607043-6ad9-43b9-b368-f038ebc2461c","Type":"ContainerStarted","Data":"779e380ecb89d198324d46a089100f223b064f3ee0ee24537a58174a060d88b5"} Dec 08 21:37:43 crc kubenswrapper[4791]: I1208 21:37:43.195763 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-2725l" Dec 08 21:37:43 crc kubenswrapper[4791]: I1208 21:37:43.237461 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-2725l" Dec 08 21:37:45 crc kubenswrapper[4791]: I1208 21:37:45.610650 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-qfcxg"] Dec 08 21:37:46 crc kubenswrapper[4791]: I1208 21:37:46.218011 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-szl4j"] Dec 08 21:37:46 crc kubenswrapper[4791]: I1208 21:37:46.219350 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-szl4j" Dec 08 21:37:46 crc kubenswrapper[4791]: I1208 21:37:46.242382 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-szl4j"] Dec 08 21:37:46 crc kubenswrapper[4791]: I1208 21:37:46.386817 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2dvb\" (UniqueName: \"kubernetes.io/projected/6b293818-e753-40aa-88ea-04fb63c0188c-kube-api-access-q2dvb\") pod \"openstack-operator-index-szl4j\" (UID: \"6b293818-e753-40aa-88ea-04fb63c0188c\") " pod="openstack-operators/openstack-operator-index-szl4j" Dec 08 21:37:46 crc kubenswrapper[4791]: I1208 21:37:46.489051 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q2dvb\" (UniqueName: \"kubernetes.io/projected/6b293818-e753-40aa-88ea-04fb63c0188c-kube-api-access-q2dvb\") pod \"openstack-operator-index-szl4j\" (UID: \"6b293818-e753-40aa-88ea-04fb63c0188c\") " pod="openstack-operators/openstack-operator-index-szl4j" Dec 08 21:37:46 crc kubenswrapper[4791]: I1208 21:37:46.509322 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q2dvb\" (UniqueName: \"kubernetes.io/projected/6b293818-e753-40aa-88ea-04fb63c0188c-kube-api-access-q2dvb\") pod \"openstack-operator-index-szl4j\" (UID: \"6b293818-e753-40aa-88ea-04fb63c0188c\") " pod="openstack-operators/openstack-operator-index-szl4j" Dec 08 21:37:46 crc kubenswrapper[4791]: I1208 21:37:46.573062 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-szl4j" Dec 08 21:37:46 crc kubenswrapper[4791]: W1208 21:37:46.986787 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6b293818_e753_40aa_88ea_04fb63c0188c.slice/crio-5155f0d7d1e275052690db015cb825d9d475d0bcb14cc54bf2ca1d2779e1053f WatchSource:0}: Error finding container 5155f0d7d1e275052690db015cb825d9d475d0bcb14cc54bf2ca1d2779e1053f: Status 404 returned error can't find the container with id 5155f0d7d1e275052690db015cb825d9d475d0bcb14cc54bf2ca1d2779e1053f Dec 08 21:37:46 crc kubenswrapper[4791]: I1208 21:37:46.996773 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-szl4j"] Dec 08 21:37:47 crc kubenswrapper[4791]: I1208 21:37:47.128562 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-szl4j" event={"ID":"6b293818-e753-40aa-88ea-04fb63c0188c","Type":"ContainerStarted","Data":"5155f0d7d1e275052690db015cb825d9d475d0bcb14cc54bf2ca1d2779e1053f"} Dec 08 21:37:47 crc kubenswrapper[4791]: I1208 21:37:47.626051 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-kf5ks" Dec 08 21:37:47 crc kubenswrapper[4791]: I1208 21:37:47.761081 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-f8648f98b-wbgwq" Dec 08 21:37:48 crc kubenswrapper[4791]: I1208 21:37:48.197437 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-2725l" Dec 08 21:37:56 crc kubenswrapper[4791]: I1208 21:37:56.202417 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-szl4j" 
event={"ID":"6b293818-e753-40aa-88ea-04fb63c0188c","Type":"ContainerStarted","Data":"cc5a2fc28351ecf16d26d6ce06261bccde96d15a2dd6f2fed1d593f38fe822ac"} Dec 08 21:37:56 crc kubenswrapper[4791]: I1208 21:37:56.204579 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-qfcxg" event={"ID":"78607043-6ad9-43b9-b368-f038ebc2461c","Type":"ContainerStarted","Data":"c4f2b93c1e8e1ded8d159f31037784f5178b08455d9a15c32ccc495573a60b6c"} Dec 08 21:37:56 crc kubenswrapper[4791]: I1208 21:37:56.204775 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-qfcxg" podUID="78607043-6ad9-43b9-b368-f038ebc2461c" containerName="registry-server" containerID="cri-o://c4f2b93c1e8e1ded8d159f31037784f5178b08455d9a15c32ccc495573a60b6c" gracePeriod=2 Dec 08 21:37:56 crc kubenswrapper[4791]: I1208 21:37:56.220880 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-szl4j" podStartSLOduration=1.625623515 podStartE2EDuration="10.220852795s" podCreationTimestamp="2025-12-08 21:37:46 +0000 UTC" firstStartedPulling="2025-12-08 21:37:46.989511016 +0000 UTC m=+1143.688269361" lastFinishedPulling="2025-12-08 21:37:55.584740296 +0000 UTC m=+1152.283498641" observedRunningTime="2025-12-08 21:37:56.217602324 +0000 UTC m=+1152.916360679" watchObservedRunningTime="2025-12-08 21:37:56.220852795 +0000 UTC m=+1152.919611140" Dec 08 21:37:56 crc kubenswrapper[4791]: I1208 21:37:56.242806 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-qfcxg" podStartSLOduration=1.740061484 podStartE2EDuration="14.242781542s" podCreationTimestamp="2025-12-08 21:37:42 +0000 UTC" firstStartedPulling="2025-12-08 21:37:43.080161242 +0000 UTC m=+1139.778919587" lastFinishedPulling="2025-12-08 21:37:55.5828813 +0000 UTC m=+1152.281639645" observedRunningTime="2025-12-08 21:37:56.234871725 +0000 UTC m=+1152.933630080" watchObservedRunningTime="2025-12-08 21:37:56.242781542 +0000 UTC m=+1152.941539907" Dec 08 21:37:56 crc kubenswrapper[4791]: I1208 21:37:56.574002 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-szl4j" Dec 08 21:37:56 crc kubenswrapper[4791]: I1208 21:37:56.574047 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-szl4j" Dec 08 21:37:56 crc kubenswrapper[4791]: I1208 21:37:56.606223 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-szl4j" Dec 08 21:37:56 crc kubenswrapper[4791]: I1208 21:37:56.662540 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-qfcxg" Dec 08 21:37:56 crc kubenswrapper[4791]: I1208 21:37:56.794447 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cs4rn\" (UniqueName: \"kubernetes.io/projected/78607043-6ad9-43b9-b368-f038ebc2461c-kube-api-access-cs4rn\") pod \"78607043-6ad9-43b9-b368-f038ebc2461c\" (UID: \"78607043-6ad9-43b9-b368-f038ebc2461c\") " Dec 08 21:37:56 crc kubenswrapper[4791]: I1208 21:37:56.801779 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78607043-6ad9-43b9-b368-f038ebc2461c-kube-api-access-cs4rn" (OuterVolumeSpecName: "kube-api-access-cs4rn") pod "78607043-6ad9-43b9-b368-f038ebc2461c" (UID: "78607043-6ad9-43b9-b368-f038ebc2461c"). InnerVolumeSpecName "kube-api-access-cs4rn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:37:56 crc kubenswrapper[4791]: I1208 21:37:56.897390 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cs4rn\" (UniqueName: \"kubernetes.io/projected/78607043-6ad9-43b9-b368-f038ebc2461c-kube-api-access-cs4rn\") on node \"crc\" DevicePath \"\"" Dec 08 21:37:57 crc kubenswrapper[4791]: I1208 21:37:57.212070 4791 generic.go:334] "Generic (PLEG): container finished" podID="78607043-6ad9-43b9-b368-f038ebc2461c" containerID="c4f2b93c1e8e1ded8d159f31037784f5178b08455d9a15c32ccc495573a60b6c" exitCode=0 Dec 08 21:37:57 crc kubenswrapper[4791]: I1208 21:37:57.212144 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-qfcxg" Dec 08 21:37:57 crc kubenswrapper[4791]: I1208 21:37:57.212175 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-qfcxg" event={"ID":"78607043-6ad9-43b9-b368-f038ebc2461c","Type":"ContainerDied","Data":"c4f2b93c1e8e1ded8d159f31037784f5178b08455d9a15c32ccc495573a60b6c"} Dec 08 21:37:57 crc kubenswrapper[4791]: I1208 21:37:57.212218 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-qfcxg" event={"ID":"78607043-6ad9-43b9-b368-f038ebc2461c","Type":"ContainerDied","Data":"779e380ecb89d198324d46a089100f223b064f3ee0ee24537a58174a060d88b5"} Dec 08 21:37:57 crc kubenswrapper[4791]: I1208 21:37:57.212235 4791 scope.go:117] "RemoveContainer" containerID="c4f2b93c1e8e1ded8d159f31037784f5178b08455d9a15c32ccc495573a60b6c" Dec 08 21:37:57 crc kubenswrapper[4791]: I1208 21:37:57.227594 4791 scope.go:117] "RemoveContainer" containerID="c4f2b93c1e8e1ded8d159f31037784f5178b08455d9a15c32ccc495573a60b6c" Dec 08 21:37:57 crc kubenswrapper[4791]: E1208 21:37:57.228075 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c4f2b93c1e8e1ded8d159f31037784f5178b08455d9a15c32ccc495573a60b6c\": container with ID starting with c4f2b93c1e8e1ded8d159f31037784f5178b08455d9a15c32ccc495573a60b6c not found: ID does not exist" containerID="c4f2b93c1e8e1ded8d159f31037784f5178b08455d9a15c32ccc495573a60b6c" Dec 08 21:37:57 crc kubenswrapper[4791]: I1208 21:37:57.228104 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c4f2b93c1e8e1ded8d159f31037784f5178b08455d9a15c32ccc495573a60b6c"} err="failed to get container status \"c4f2b93c1e8e1ded8d159f31037784f5178b08455d9a15c32ccc495573a60b6c\": rpc error: code = NotFound desc = could not find container 
\"c4f2b93c1e8e1ded8d159f31037784f5178b08455d9a15c32ccc495573a60b6c\": container with ID starting with c4f2b93c1e8e1ded8d159f31037784f5178b08455d9a15c32ccc495573a60b6c not found: ID does not exist" Dec 08 21:37:57 crc kubenswrapper[4791]: I1208 21:37:57.243457 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-qfcxg"] Dec 08 21:37:57 crc kubenswrapper[4791]: I1208 21:37:57.249293 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-qfcxg"] Dec 08 21:37:57 crc kubenswrapper[4791]: I1208 21:37:57.614091 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78607043-6ad9-43b9-b368-f038ebc2461c" path="/var/lib/kubelet/pods/78607043-6ad9-43b9-b368-f038ebc2461c/volumes" Dec 08 21:38:05 crc kubenswrapper[4791]: I1208 21:38:05.251519 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:38:05 crc kubenswrapper[4791]: I1208 21:38:05.251860 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:38:06 crc kubenswrapper[4791]: I1208 21:38:06.613318 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-szl4j" Dec 08 21:38:13 crc kubenswrapper[4791]: I1208 21:38:13.391603 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009bwv2wx"] Dec 08 21:38:13 crc kubenswrapper[4791]: E1208 21:38:13.392585 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78607043-6ad9-43b9-b368-f038ebc2461c" containerName="registry-server" Dec 08 21:38:13 crc kubenswrapper[4791]: I1208 21:38:13.392600 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="78607043-6ad9-43b9-b368-f038ebc2461c" containerName="registry-server" Dec 08 21:38:13 crc kubenswrapper[4791]: I1208 21:38:13.392772 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="78607043-6ad9-43b9-b368-f038ebc2461c" containerName="registry-server" Dec 08 21:38:13 crc kubenswrapper[4791]: I1208 21:38:13.393978 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009bwv2wx" Dec 08 21:38:13 crc kubenswrapper[4791]: I1208 21:38:13.396145 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-dksc6" Dec 08 21:38:13 crc kubenswrapper[4791]: I1208 21:38:13.402567 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009bwv2wx"] Dec 08 21:38:13 crc kubenswrapper[4791]: I1208 21:38:13.403615 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hr5mg\" (UniqueName: \"kubernetes.io/projected/d6b28097-7d9a-453d-9f3e-25998fbd0181-kube-api-access-hr5mg\") pod \"6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009bwv2wx\" (UID: \"d6b28097-7d9a-453d-9f3e-25998fbd0181\") " pod="openstack-operators/6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009bwv2wx" Dec 08 21:38:13 crc kubenswrapper[4791]: I1208 21:38:13.403688 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d6b28097-7d9a-453d-9f3e-25998fbd0181-util\") pod \"6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009bwv2wx\" (UID: \"d6b28097-7d9a-453d-9f3e-25998fbd0181\") " pod="openstack-operators/6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009bwv2wx" Dec 08 21:38:13 crc kubenswrapper[4791]: I1208 21:38:13.403903 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d6b28097-7d9a-453d-9f3e-25998fbd0181-bundle\") pod \"6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009bwv2wx\" (UID: \"d6b28097-7d9a-453d-9f3e-25998fbd0181\") " pod="openstack-operators/6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009bwv2wx" Dec 08 21:38:13 crc kubenswrapper[4791]: I1208 21:38:13.505767 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d6b28097-7d9a-453d-9f3e-25998fbd0181-bundle\") pod \"6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009bwv2wx\" (UID: \"d6b28097-7d9a-453d-9f3e-25998fbd0181\") " pod="openstack-operators/6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009bwv2wx" Dec 08 21:38:13 crc kubenswrapper[4791]: I1208 21:38:13.505888 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hr5mg\" (UniqueName: \"kubernetes.io/projected/d6b28097-7d9a-453d-9f3e-25998fbd0181-kube-api-access-hr5mg\") pod \"6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009bwv2wx\" (UID: \"d6b28097-7d9a-453d-9f3e-25998fbd0181\") " pod="openstack-operators/6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009bwv2wx" Dec 08 21:38:13 crc kubenswrapper[4791]: I1208 21:38:13.505949 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d6b28097-7d9a-453d-9f3e-25998fbd0181-util\") pod \"6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009bwv2wx\" (UID: \"d6b28097-7d9a-453d-9f3e-25998fbd0181\") " pod="openstack-operators/6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009bwv2wx" Dec 08 21:38:13 crc kubenswrapper[4791]: I1208 21:38:13.506350 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/d6b28097-7d9a-453d-9f3e-25998fbd0181-bundle\") pod \"6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009bwv2wx\" (UID: \"d6b28097-7d9a-453d-9f3e-25998fbd0181\") " pod="openstack-operators/6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009bwv2wx" Dec 08 21:38:13 crc kubenswrapper[4791]: I1208 21:38:13.506518 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d6b28097-7d9a-453d-9f3e-25998fbd0181-util\") pod \"6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009bwv2wx\" (UID: \"d6b28097-7d9a-453d-9f3e-25998fbd0181\") " pod="openstack-operators/6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009bwv2wx" Dec 08 21:38:13 crc kubenswrapper[4791]: I1208 21:38:13.524721 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hr5mg\" (UniqueName: \"kubernetes.io/projected/d6b28097-7d9a-453d-9f3e-25998fbd0181-kube-api-access-hr5mg\") pod \"6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009bwv2wx\" (UID: \"d6b28097-7d9a-453d-9f3e-25998fbd0181\") " pod="openstack-operators/6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009bwv2wx" Dec 08 21:38:13 crc kubenswrapper[4791]: I1208 21:38:13.712467 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009bwv2wx" Dec 08 21:38:14 crc kubenswrapper[4791]: I1208 21:38:14.118628 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009bwv2wx"] Dec 08 21:38:14 crc kubenswrapper[4791]: I1208 21:38:14.381920 4791 generic.go:334] "Generic (PLEG): container finished" podID="d6b28097-7d9a-453d-9f3e-25998fbd0181" containerID="c7f50d0e5808d7d89f3807337791d415ea035242987fd9facb78d388916e0974" exitCode=0 Dec 08 21:38:14 crc kubenswrapper[4791]: I1208 21:38:14.381977 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009bwv2wx" event={"ID":"d6b28097-7d9a-453d-9f3e-25998fbd0181","Type":"ContainerDied","Data":"c7f50d0e5808d7d89f3807337791d415ea035242987fd9facb78d388916e0974"} Dec 08 21:38:14 crc kubenswrapper[4791]: I1208 21:38:14.382022 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009bwv2wx" event={"ID":"d6b28097-7d9a-453d-9f3e-25998fbd0181","Type":"ContainerStarted","Data":"c030d66611910e0910cd0eeff886e3a4cf1ab572aac1b96dcc927b4ec19c573d"} Dec 08 21:38:15 crc kubenswrapper[4791]: I1208 21:38:15.392655 4791 generic.go:334] "Generic (PLEG): container finished" podID="d6b28097-7d9a-453d-9f3e-25998fbd0181" containerID="72e4e04618fe25df3eb1eaa9f21091b3bdb574e881b2685570dd761d0872027d" exitCode=0 Dec 08 21:38:15 crc kubenswrapper[4791]: I1208 21:38:15.392733 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009bwv2wx" event={"ID":"d6b28097-7d9a-453d-9f3e-25998fbd0181","Type":"ContainerDied","Data":"72e4e04618fe25df3eb1eaa9f21091b3bdb574e881b2685570dd761d0872027d"} Dec 08 21:38:16 crc kubenswrapper[4791]: I1208 21:38:16.403683 4791 generic.go:334] "Generic (PLEG): container finished" podID="d6b28097-7d9a-453d-9f3e-25998fbd0181" containerID="6b0e95bc4f67dea9c5d36fbabceb1b00a88a5f599007056921f2ad213b0afb98" exitCode=0 Dec 08 21:38:16 crc kubenswrapper[4791]: I1208 21:38:16.404094 4791 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009bwv2wx" event={"ID":"d6b28097-7d9a-453d-9f3e-25998fbd0181","Type":"ContainerDied","Data":"6b0e95bc4f67dea9c5d36fbabceb1b00a88a5f599007056921f2ad213b0afb98"} Dec 08 21:38:17 crc kubenswrapper[4791]: I1208 21:38:17.741438 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009bwv2wx" Dec 08 21:38:17 crc kubenswrapper[4791]: I1208 21:38:17.783385 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hr5mg\" (UniqueName: \"kubernetes.io/projected/d6b28097-7d9a-453d-9f3e-25998fbd0181-kube-api-access-hr5mg\") pod \"d6b28097-7d9a-453d-9f3e-25998fbd0181\" (UID: \"d6b28097-7d9a-453d-9f3e-25998fbd0181\") " Dec 08 21:38:17 crc kubenswrapper[4791]: I1208 21:38:17.783549 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d6b28097-7d9a-453d-9f3e-25998fbd0181-util\") pod \"d6b28097-7d9a-453d-9f3e-25998fbd0181\" (UID: \"d6b28097-7d9a-453d-9f3e-25998fbd0181\") " Dec 08 21:38:17 crc kubenswrapper[4791]: I1208 21:38:17.783651 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d6b28097-7d9a-453d-9f3e-25998fbd0181-bundle\") pod \"d6b28097-7d9a-453d-9f3e-25998fbd0181\" (UID: \"d6b28097-7d9a-453d-9f3e-25998fbd0181\") " Dec 08 21:38:17 crc kubenswrapper[4791]: I1208 21:38:17.784295 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d6b28097-7d9a-453d-9f3e-25998fbd0181-bundle" (OuterVolumeSpecName: "bundle") pod "d6b28097-7d9a-453d-9f3e-25998fbd0181" (UID: "d6b28097-7d9a-453d-9f3e-25998fbd0181"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:38:17 crc kubenswrapper[4791]: I1208 21:38:17.793940 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6b28097-7d9a-453d-9f3e-25998fbd0181-kube-api-access-hr5mg" (OuterVolumeSpecName: "kube-api-access-hr5mg") pod "d6b28097-7d9a-453d-9f3e-25998fbd0181" (UID: "d6b28097-7d9a-453d-9f3e-25998fbd0181"). InnerVolumeSpecName "kube-api-access-hr5mg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:38:17 crc kubenswrapper[4791]: I1208 21:38:17.800601 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d6b28097-7d9a-453d-9f3e-25998fbd0181-util" (OuterVolumeSpecName: "util") pod "d6b28097-7d9a-453d-9f3e-25998fbd0181" (UID: "d6b28097-7d9a-453d-9f3e-25998fbd0181"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:38:17 crc kubenswrapper[4791]: I1208 21:38:17.885560 4791 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d6b28097-7d9a-453d-9f3e-25998fbd0181-util\") on node \"crc\" DevicePath \"\"" Dec 08 21:38:17 crc kubenswrapper[4791]: I1208 21:38:17.885601 4791 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d6b28097-7d9a-453d-9f3e-25998fbd0181-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:38:17 crc kubenswrapper[4791]: I1208 21:38:17.885611 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hr5mg\" (UniqueName: \"kubernetes.io/projected/d6b28097-7d9a-453d-9f3e-25998fbd0181-kube-api-access-hr5mg\") on node \"crc\" DevicePath \"\"" Dec 08 21:38:18 crc kubenswrapper[4791]: I1208 21:38:18.421102 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009bwv2wx" event={"ID":"d6b28097-7d9a-453d-9f3e-25998fbd0181","Type":"ContainerDied","Data":"c030d66611910e0910cd0eeff886e3a4cf1ab572aac1b96dcc927b4ec19c573d"} Dec 08 21:38:18 crc kubenswrapper[4791]: I1208 21:38:18.421147 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c030d66611910e0910cd0eeff886e3a4cf1ab572aac1b96dcc927b4ec19c573d" Dec 08 21:38:18 crc kubenswrapper[4791]: I1208 21:38:18.421213 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009bwv2wx" Dec 08 21:38:21 crc kubenswrapper[4791]: I1208 21:38:21.902332 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-7d4449658c-pljtb"] Dec 08 21:38:21 crc kubenswrapper[4791]: E1208 21:38:21.902780 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6b28097-7d9a-453d-9f3e-25998fbd0181" containerName="extract" Dec 08 21:38:21 crc kubenswrapper[4791]: I1208 21:38:21.903117 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6b28097-7d9a-453d-9f3e-25998fbd0181" containerName="extract" Dec 08 21:38:21 crc kubenswrapper[4791]: E1208 21:38:21.903161 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6b28097-7d9a-453d-9f3e-25998fbd0181" containerName="util" Dec 08 21:38:21 crc kubenswrapper[4791]: I1208 21:38:21.903167 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6b28097-7d9a-453d-9f3e-25998fbd0181" containerName="util" Dec 08 21:38:21 crc kubenswrapper[4791]: E1208 21:38:21.903179 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6b28097-7d9a-453d-9f3e-25998fbd0181" containerName="pull" Dec 08 21:38:21 crc kubenswrapper[4791]: I1208 21:38:21.903187 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6b28097-7d9a-453d-9f3e-25998fbd0181" containerName="pull" Dec 08 21:38:21 crc kubenswrapper[4791]: I1208 21:38:21.903336 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6b28097-7d9a-453d-9f3e-25998fbd0181" containerName="extract" Dec 08 21:38:21 crc kubenswrapper[4791]: I1208 21:38:21.903881 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-7d4449658c-pljtb" Dec 08 21:38:21 crc kubenswrapper[4791]: I1208 21:38:21.906381 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-c5mdc" Dec 08 21:38:21 crc kubenswrapper[4791]: I1208 21:38:21.932113 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-7d4449658c-pljtb"] Dec 08 21:38:21 crc kubenswrapper[4791]: I1208 21:38:21.967246 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gx4l9\" (UniqueName: \"kubernetes.io/projected/97760082-6ccb-4973-9fb6-274647592883-kube-api-access-gx4l9\") pod \"openstack-operator-controller-operator-7d4449658c-pljtb\" (UID: \"97760082-6ccb-4973-9fb6-274647592883\") " pod="openstack-operators/openstack-operator-controller-operator-7d4449658c-pljtb" Dec 08 21:38:22 crc kubenswrapper[4791]: I1208 21:38:22.069274 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gx4l9\" (UniqueName: \"kubernetes.io/projected/97760082-6ccb-4973-9fb6-274647592883-kube-api-access-gx4l9\") pod \"openstack-operator-controller-operator-7d4449658c-pljtb\" (UID: \"97760082-6ccb-4973-9fb6-274647592883\") " pod="openstack-operators/openstack-operator-controller-operator-7d4449658c-pljtb" Dec 08 21:38:22 crc kubenswrapper[4791]: I1208 21:38:22.095132 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gx4l9\" (UniqueName: \"kubernetes.io/projected/97760082-6ccb-4973-9fb6-274647592883-kube-api-access-gx4l9\") pod \"openstack-operator-controller-operator-7d4449658c-pljtb\" (UID: \"97760082-6ccb-4973-9fb6-274647592883\") " pod="openstack-operators/openstack-operator-controller-operator-7d4449658c-pljtb" Dec 08 21:38:22 crc kubenswrapper[4791]: I1208 21:38:22.226396 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-7d4449658c-pljtb" Dec 08 21:38:22 crc kubenswrapper[4791]: I1208 21:38:22.759423 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-7d4449658c-pljtb"] Dec 08 21:38:23 crc kubenswrapper[4791]: I1208 21:38:23.463387 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-7d4449658c-pljtb" event={"ID":"97760082-6ccb-4973-9fb6-274647592883","Type":"ContainerStarted","Data":"0e72c96f38c9b43dde100925141ea9af9f0883a1f86b5ffb456dce28714a0c96"} Dec 08 21:38:27 crc kubenswrapper[4791]: I1208 21:38:27.501365 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-7d4449658c-pljtb" event={"ID":"97760082-6ccb-4973-9fb6-274647592883","Type":"ContainerStarted","Data":"811a04e917add4ab8bb3f4df415c620e48320d9945c6735c281b15c4db2420cf"} Dec 08 21:38:27 crc kubenswrapper[4791]: I1208 21:38:27.502308 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-7d4449658c-pljtb" Dec 08 21:38:27 crc kubenswrapper[4791]: I1208 21:38:27.547698 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-7d4449658c-pljtb" podStartSLOduration=2.339047212 podStartE2EDuration="6.547678272s" podCreationTimestamp="2025-12-08 21:38:21 +0000 UTC" firstStartedPulling="2025-12-08 21:38:22.766567321 +0000 UTC m=+1179.465325666" lastFinishedPulling="2025-12-08 21:38:26.975198381 +0000 UTC m=+1183.673956726" observedRunningTime="2025-12-08 21:38:27.543127719 +0000 UTC m=+1184.241886064" watchObservedRunningTime="2025-12-08 21:38:27.547678272 +0000 UTC m=+1184.246436617" Dec 08 21:38:32 crc kubenswrapper[4791]: I1208 21:38:32.228494 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-7d4449658c-pljtb" Dec 08 21:38:35 crc kubenswrapper[4791]: I1208 21:38:35.251970 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:38:35 crc kubenswrapper[4791]: I1208 21:38:35.252301 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:38:35 crc kubenswrapper[4791]: I1208 21:38:35.252360 4791 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" Dec 08 21:38:35 crc kubenswrapper[4791]: I1208 21:38:35.253143 4791 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2c818dc85098516c004b18fbf01aa57ea0f0a817f59523978cf0e86c2b78304e"} pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 08 21:38:35 crc kubenswrapper[4791]: I1208 
21:38:35.253208 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" containerID="cri-o://2c818dc85098516c004b18fbf01aa57ea0f0a817f59523978cf0e86c2b78304e" gracePeriod=600 Dec 08 21:38:36 crc kubenswrapper[4791]: I1208 21:38:36.571344 4791 generic.go:334] "Generic (PLEG): container finished" podID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerID="2c818dc85098516c004b18fbf01aa57ea0f0a817f59523978cf0e86c2b78304e" exitCode=0 Dec 08 21:38:36 crc kubenswrapper[4791]: I1208 21:38:36.571411 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" event={"ID":"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d","Type":"ContainerDied","Data":"2c818dc85098516c004b18fbf01aa57ea0f0a817f59523978cf0e86c2b78304e"} Dec 08 21:38:36 crc kubenswrapper[4791]: I1208 21:38:36.571681 4791 scope.go:117] "RemoveContainer" containerID="11eb0a686e8878342818f508b449514d71e01b400661915caf0121820b7c92ca" Dec 08 21:38:37 crc kubenswrapper[4791]: I1208 21:38:37.579459 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" event={"ID":"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d","Type":"ContainerStarted","Data":"294a71027908b890218887589647a3ead6eb10efa3b336cb826e5a4ab73343c7"} Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.350371 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-9dcqh"] Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.353139 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-9dcqh" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.357440 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-6c677c69b-4d6px"] Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.359223 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-4d6px" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.360936 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-zl8lp" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.361549 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-stccd" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.375061 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-6c677c69b-4d6px"] Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.388884 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-9dcqh"] Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.433810 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-697fb699cf-zn5rk"] Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.435594 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-zn5rk" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.442821 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-5697bb5779-rzq2w"] Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.443089 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-gh74c" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.444234 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-rzq2w" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.447288 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-qv69v" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.459765 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6xh5l\" (UniqueName: \"kubernetes.io/projected/684d1010-fc58-4789-b8f6-ebe783ec15fe-kube-api-access-6xh5l\") pod \"cinder-operator-controller-manager-6c677c69b-4d6px\" (UID: \"684d1010-fc58-4789-b8f6-ebe783ec15fe\") " pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-4d6px" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.459862 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tpdmw\" (UniqueName: \"kubernetes.io/projected/c3e8c89c-91da-44c4-95ec-20b5d543eca1-kube-api-access-tpdmw\") pod \"barbican-operator-controller-manager-7d9dfd778-9dcqh\" (UID: \"c3e8c89c-91da-44c4-95ec-20b5d543eca1\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-9dcqh" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.460094 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-5697bb5779-rzq2w"] Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.485486 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-697fb699cf-zn5rk"] Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.498151 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-fsm5r"] Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.500409 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-fsm5r" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.503798 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-h2hpf" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.525693 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-fsm5r"] Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.548557 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-4mwvh"] Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.550815 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-4mwvh" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.554779 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-78d48bff9d-6fqwg"] Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.556577 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-6fqwg" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.561105 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mk9q8\" (UniqueName: \"kubernetes.io/projected/45ebd174-c21e-4fb5-ae01-cf6b3d5e7079-kube-api-access-mk9q8\") pod \"designate-operator-controller-manager-697fb699cf-zn5rk\" (UID: \"45ebd174-c21e-4fb5-ae01-cf6b3d5e7079\") " pod="openstack-operators/designate-operator-controller-manager-697fb699cf-zn5rk" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.561150 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wwt6v\" (UniqueName: \"kubernetes.io/projected/06e9548e-f7f7-4d48-a10a-06de61005b07-kube-api-access-wwt6v\") pod \"heat-operator-controller-manager-5f64f6f8bb-fsm5r\" (UID: \"06e9548e-f7f7-4d48-a10a-06de61005b07\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-fsm5r" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.561178 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6xh5l\" (UniqueName: \"kubernetes.io/projected/684d1010-fc58-4789-b8f6-ebe783ec15fe-kube-api-access-6xh5l\") pod \"cinder-operator-controller-manager-6c677c69b-4d6px\" (UID: \"684d1010-fc58-4789-b8f6-ebe783ec15fe\") " pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-4d6px" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.561229 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dwzgd\" (UniqueName: \"kubernetes.io/projected/430711b5-aa60-4462-a730-242ecb914d6c-kube-api-access-dwzgd\") pod \"glance-operator-controller-manager-5697bb5779-rzq2w\" (UID: \"430711b5-aa60-4462-a730-242ecb914d6c\") " pod="openstack-operators/glance-operator-controller-manager-5697bb5779-rzq2w" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.561266 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tpdmw\" (UniqueName: \"kubernetes.io/projected/c3e8c89c-91da-44c4-95ec-20b5d543eca1-kube-api-access-tpdmw\") pod \"barbican-operator-controller-manager-7d9dfd778-9dcqh\" (UID: \"c3e8c89c-91da-44c4-95ec-20b5d543eca1\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-9dcqh" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.564186 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-c5868" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.575804 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.580648 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-4mwvh"] Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.580884 4791 reflector.go:368] Caches 
populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-spfws" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.617562 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-967d97867-s5pvq"] Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.619551 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-967d97867-s5pvq" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.624658 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-xwkbd" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.632741 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tpdmw\" (UniqueName: \"kubernetes.io/projected/c3e8c89c-91da-44c4-95ec-20b5d543eca1-kube-api-access-tpdmw\") pod \"barbican-operator-controller-manager-7d9dfd778-9dcqh\" (UID: \"c3e8c89c-91da-44c4-95ec-20b5d543eca1\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-9dcqh" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.633825 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6xh5l\" (UniqueName: \"kubernetes.io/projected/684d1010-fc58-4789-b8f6-ebe783ec15fe-kube-api-access-6xh5l\") pod \"cinder-operator-controller-manager-6c677c69b-4d6px\" (UID: \"684d1010-fc58-4789-b8f6-ebe783ec15fe\") " pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-4d6px" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.648762 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-78d48bff9d-6fqwg"] Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.657140 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-967d97867-s5pvq"] Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.668299 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mk9q8\" (UniqueName: \"kubernetes.io/projected/45ebd174-c21e-4fb5-ae01-cf6b3d5e7079-kube-api-access-mk9q8\") pod \"designate-operator-controller-manager-697fb699cf-zn5rk\" (UID: \"45ebd174-c21e-4fb5-ae01-cf6b3d5e7079\") " pod="openstack-operators/designate-operator-controller-manager-697fb699cf-zn5rk" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.668545 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vb596\" (UniqueName: \"kubernetes.io/projected/b6c24020-d177-4816-ac96-7f97f8f243a1-kube-api-access-vb596\") pod \"infra-operator-controller-manager-78d48bff9d-6fqwg\" (UID: \"b6c24020-d177-4816-ac96-7f97f8f243a1\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-6fqwg" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.668582 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wwt6v\" (UniqueName: \"kubernetes.io/projected/06e9548e-f7f7-4d48-a10a-06de61005b07-kube-api-access-wwt6v\") pod \"heat-operator-controller-manager-5f64f6f8bb-fsm5r\" (UID: \"06e9548e-f7f7-4d48-a10a-06de61005b07\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-fsm5r" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.668602 4791 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b6c24020-d177-4816-ac96-7f97f8f243a1-cert\") pod \"infra-operator-controller-manager-78d48bff9d-6fqwg\" (UID: \"b6c24020-d177-4816-ac96-7f97f8f243a1\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-6fqwg" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.668671 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dwzgd\" (UniqueName: \"kubernetes.io/projected/430711b5-aa60-4462-a730-242ecb914d6c-kube-api-access-dwzgd\") pod \"glance-operator-controller-manager-5697bb5779-rzq2w\" (UID: \"430711b5-aa60-4462-a730-242ecb914d6c\") " pod="openstack-operators/glance-operator-controller-manager-5697bb5779-rzq2w" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.673003 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-5tzxv"] Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.674809 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-5tzxv" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.675677 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8bkk5\" (UniqueName: \"kubernetes.io/projected/253be35d-aa0c-417b-8dc8-7ef23f63ce45-kube-api-access-8bkk5\") pod \"horizon-operator-controller-manager-68c6d99b8f-4mwvh\" (UID: \"253be35d-aa0c-417b-8dc8-7ef23f63ce45\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-4mwvh" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.681321 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-9dcqh" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.688053 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-pnjqk" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.700670 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-5tzxv"] Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.706196 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-4d6px" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.712402 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-5b5fd79c9c-ddnts"] Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.714124 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-ddnts" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.717602 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dwzgd\" (UniqueName: \"kubernetes.io/projected/430711b5-aa60-4462-a730-242ecb914d6c-kube-api-access-dwzgd\") pod \"glance-operator-controller-manager-5697bb5779-rzq2w\" (UID: \"430711b5-aa60-4462-a730-242ecb914d6c\") " pod="openstack-operators/glance-operator-controller-manager-5697bb5779-rzq2w" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.742478 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-67w4c" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.752936 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wwt6v\" (UniqueName: \"kubernetes.io/projected/06e9548e-f7f7-4d48-a10a-06de61005b07-kube-api-access-wwt6v\") pod \"heat-operator-controller-manager-5f64f6f8bb-fsm5r\" (UID: \"06e9548e-f7f7-4d48-a10a-06de61005b07\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-fsm5r" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.763320 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mk9q8\" (UniqueName: \"kubernetes.io/projected/45ebd174-c21e-4fb5-ae01-cf6b3d5e7079-kube-api-access-mk9q8\") pod \"designate-operator-controller-manager-697fb699cf-zn5rk\" (UID: \"45ebd174-c21e-4fb5-ae01-cf6b3d5e7079\") " pod="openstack-operators/designate-operator-controller-manager-697fb699cf-zn5rk" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.763391 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-79c8c4686c-nlvf4"] Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.764802 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-nlvf4" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.767834 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-zn5rk" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.772101 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-ctrkr" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.778217 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jwtrt\" (UniqueName: \"kubernetes.io/projected/55e85e76-95f6-46ce-906a-26ce559775bc-kube-api-access-jwtrt\") pod \"keystone-operator-controller-manager-7765d96ddf-5tzxv\" (UID: \"55e85e76-95f6-46ce-906a-26ce559775bc\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-5tzxv" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.778304 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wz5wd\" (UniqueName: \"kubernetes.io/projected/301abb1d-1139-4636-805b-c6458568fe7e-kube-api-access-wz5wd\") pod \"manila-operator-controller-manager-5b5fd79c9c-ddnts\" (UID: \"301abb1d-1139-4636-805b-c6458568fe7e\") " pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-ddnts" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.778421 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8bkk5\" (UniqueName: \"kubernetes.io/projected/253be35d-aa0c-417b-8dc8-7ef23f63ce45-kube-api-access-8bkk5\") pod \"horizon-operator-controller-manager-68c6d99b8f-4mwvh\" (UID: \"253be35d-aa0c-417b-8dc8-7ef23f63ce45\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-4mwvh" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.778447 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9cj8p\" (UniqueName: \"kubernetes.io/projected/23d7f321-494b-46fa-890a-6cb7f47fdb49-kube-api-access-9cj8p\") pod \"ironic-operator-controller-manager-967d97867-s5pvq\" (UID: \"23d7f321-494b-46fa-890a-6cb7f47fdb49\") " pod="openstack-operators/ironic-operator-controller-manager-967d97867-s5pvq" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.778533 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vb596\" (UniqueName: \"kubernetes.io/projected/b6c24020-d177-4816-ac96-7f97f8f243a1-kube-api-access-vb596\") pod \"infra-operator-controller-manager-78d48bff9d-6fqwg\" (UID: \"b6c24020-d177-4816-ac96-7f97f8f243a1\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-6fqwg" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.778573 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b6c24020-d177-4816-ac96-7f97f8f243a1-cert\") pod \"infra-operator-controller-manager-78d48bff9d-6fqwg\" (UID: \"b6c24020-d177-4816-ac96-7f97f8f243a1\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-6fqwg" Dec 08 21:39:04 crc kubenswrapper[4791]: E1208 21:39:04.778737 4791 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 08 21:39:04 crc kubenswrapper[4791]: E1208 21:39:04.778783 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b6c24020-d177-4816-ac96-7f97f8f243a1-cert podName:b6c24020-d177-4816-ac96-7f97f8f243a1 nodeName:}" failed. 
No retries permitted until 2025-12-08 21:39:05.278765975 +0000 UTC m=+1221.977524310 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/b6c24020-d177-4816-ac96-7f97f8f243a1-cert") pod "infra-operator-controller-manager-78d48bff9d-6fqwg" (UID: "b6c24020-d177-4816-ac96-7f97f8f243a1") : secret "infra-operator-webhook-server-cert" not found Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.783186 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-rzq2w" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.838415 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-5b5fd79c9c-ddnts"] Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.846424 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vb596\" (UniqueName: \"kubernetes.io/projected/b6c24020-d177-4816-ac96-7f97f8f243a1-kube-api-access-vb596\") pod \"infra-operator-controller-manager-78d48bff9d-6fqwg\" (UID: \"b6c24020-d177-4816-ac96-7f97f8f243a1\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-6fqwg" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.853701 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8bkk5\" (UniqueName: \"kubernetes.io/projected/253be35d-aa0c-417b-8dc8-7ef23f63ce45-kube-api-access-8bkk5\") pod \"horizon-operator-controller-manager-68c6d99b8f-4mwvh\" (UID: \"253be35d-aa0c-417b-8dc8-7ef23f63ce45\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-4mwvh" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.883236 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jwtrt\" (UniqueName: \"kubernetes.io/projected/55e85e76-95f6-46ce-906a-26ce559775bc-kube-api-access-jwtrt\") pod \"keystone-operator-controller-manager-7765d96ddf-5tzxv\" (UID: \"55e85e76-95f6-46ce-906a-26ce559775bc\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-5tzxv" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.884251 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wz5wd\" (UniqueName: \"kubernetes.io/projected/301abb1d-1139-4636-805b-c6458568fe7e-kube-api-access-wz5wd\") pod \"manila-operator-controller-manager-5b5fd79c9c-ddnts\" (UID: \"301abb1d-1139-4636-805b-c6458568fe7e\") " pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-ddnts" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.884421 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tvsn5\" (UniqueName: \"kubernetes.io/projected/4d6f8fd6-c9d1-483e-8cc6-29d2d53f1235-kube-api-access-tvsn5\") pod \"mariadb-operator-controller-manager-79c8c4686c-nlvf4\" (UID: \"4d6f8fd6-c9d1-483e-8cc6-29d2d53f1235\") " pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-nlvf4" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.884508 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9cj8p\" (UniqueName: \"kubernetes.io/projected/23d7f321-494b-46fa-890a-6cb7f47fdb49-kube-api-access-9cj8p\") pod \"ironic-operator-controller-manager-967d97867-s5pvq\" (UID: \"23d7f321-494b-46fa-890a-6cb7f47fdb49\") " 
pod="openstack-operators/ironic-operator-controller-manager-967d97867-s5pvq" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.896427 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-4mwvh" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.897379 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-fsm5r" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.914791 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-79c8c4686c-nlvf4"] Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.922686 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wz5wd\" (UniqueName: \"kubernetes.io/projected/301abb1d-1139-4636-805b-c6458568fe7e-kube-api-access-wz5wd\") pod \"manila-operator-controller-manager-5b5fd79c9c-ddnts\" (UID: \"301abb1d-1139-4636-805b-c6458568fe7e\") " pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-ddnts" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.981492 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jwtrt\" (UniqueName: \"kubernetes.io/projected/55e85e76-95f6-46ce-906a-26ce559775bc-kube-api-access-jwtrt\") pod \"keystone-operator-controller-manager-7765d96ddf-5tzxv\" (UID: \"55e85e76-95f6-46ce-906a-26ce559775bc\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-5tzxv" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.991631 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tvsn5\" (UniqueName: \"kubernetes.io/projected/4d6f8fd6-c9d1-483e-8cc6-29d2d53f1235-kube-api-access-tvsn5\") pod \"mariadb-operator-controller-manager-79c8c4686c-nlvf4\" (UID: \"4d6f8fd6-c9d1-483e-8cc6-29d2d53f1235\") " pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-nlvf4" Dec 08 21:39:04 crc kubenswrapper[4791]: I1208 21:39:04.993926 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hskzn"] Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.004898 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hskzn" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.007562 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9cj8p\" (UniqueName: \"kubernetes.io/projected/23d7f321-494b-46fa-890a-6cb7f47fdb49-kube-api-access-9cj8p\") pod \"ironic-operator-controller-manager-967d97867-s5pvq\" (UID: \"23d7f321-494b-46fa-890a-6cb7f47fdb49\") " pod="openstack-operators/ironic-operator-controller-manager-967d97867-s5pvq" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.012978 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-nkbht" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.077940 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tvsn5\" (UniqueName: \"kubernetes.io/projected/4d6f8fd6-c9d1-483e-8cc6-29d2d53f1235-kube-api-access-tvsn5\") pod \"mariadb-operator-controller-manager-79c8c4686c-nlvf4\" (UID: \"4d6f8fd6-c9d1-483e-8cc6-29d2d53f1235\") " pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-nlvf4" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.096834 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hskzn"] Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.097881 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g5xfn\" (UniqueName: \"kubernetes.io/projected/ac0b7209-48f2-4080-bd26-86462503772b-kube-api-access-g5xfn\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-hskzn\" (UID: \"ac0b7209-48f2-4080-bd26-86462503772b\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hskzn" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.129027 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-6vmnd"] Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.130805 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-6vmnd" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.135335 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-k85hl" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.147764 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-5tzxv" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.154024 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-spmqj"] Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.157900 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-spmqj" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.163753 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-m4psr" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.178279 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-6vmnd"] Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.189612 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-ddnts" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.194489 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-spmqj"] Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.199258 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q4t9b\" (UniqueName: \"kubernetes.io/projected/8714efaf-0a6b-46ba-aadb-2fef8f7f1a32-kube-api-access-q4t9b\") pod \"nova-operator-controller-manager-697bc559fc-6vmnd\" (UID: \"8714efaf-0a6b-46ba-aadb-2fef8f7f1a32\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-6vmnd" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.199334 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g5xfn\" (UniqueName: \"kubernetes.io/projected/ac0b7209-48f2-4080-bd26-86462503772b-kube-api-access-g5xfn\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-hskzn\" (UID: \"ac0b7209-48f2-4080-bd26-86462503772b\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hskzn" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.205402 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-nlvf4" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.241509 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g5xfn\" (UniqueName: \"kubernetes.io/projected/ac0b7209-48f2-4080-bd26-86462503772b-kube-api-access-g5xfn\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-hskzn\" (UID: \"ac0b7209-48f2-4080-bd26-86462503772b\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hskzn" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.255839 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-wx44v"] Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.257224 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-wx44v" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.260305 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-4qtrl" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.281869 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-258f4"] Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.283900 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-258f4" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.294556 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-vhnbc" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.305603 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-967d97867-s5pvq" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.307012 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q4t9b\" (UniqueName: \"kubernetes.io/projected/8714efaf-0a6b-46ba-aadb-2fef8f7f1a32-kube-api-access-q4t9b\") pod \"nova-operator-controller-manager-697bc559fc-6vmnd\" (UID: \"8714efaf-0a6b-46ba-aadb-2fef8f7f1a32\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-6vmnd" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.307083 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rt7sg\" (UniqueName: \"kubernetes.io/projected/195e298b-eaa8-4d82-a246-bf28d442d9f9-kube-api-access-rt7sg\") pod \"octavia-operator-controller-manager-998648c74-spmqj\" (UID: \"195e298b-eaa8-4d82-a246-bf28d442d9f9\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-spmqj" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.307160 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b6c24020-d177-4816-ac96-7f97f8f243a1-cert\") pod \"infra-operator-controller-manager-78d48bff9d-6fqwg\" (UID: \"b6c24020-d177-4816-ac96-7f97f8f243a1\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-6fqwg" Dec 08 21:39:05 crc kubenswrapper[4791]: E1208 21:39:05.307300 4791 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 08 21:39:05 crc kubenswrapper[4791]: E1208 21:39:05.307364 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b6c24020-d177-4816-ac96-7f97f8f243a1-cert podName:b6c24020-d177-4816-ac96-7f97f8f243a1 nodeName:}" failed. No retries permitted until 2025-12-08 21:39:06.307347682 +0000 UTC m=+1223.006106027 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/b6c24020-d177-4816-ac96-7f97f8f243a1-cert") pod "infra-operator-controller-manager-78d48bff9d-6fqwg" (UID: "b6c24020-d177-4816-ac96-7f97f8f243a1") : secret "infra-operator-webhook-server-cert" not found Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.322813 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-wx44v"] Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.337008 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q4t9b\" (UniqueName: \"kubernetes.io/projected/8714efaf-0a6b-46ba-aadb-2fef8f7f1a32-kube-api-access-q4t9b\") pod \"nova-operator-controller-manager-697bc559fc-6vmnd\" (UID: \"8714efaf-0a6b-46ba-aadb-2fef8f7f1a32\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-6vmnd" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.343020 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-9d58d64bc-d2fdf"] Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.344456 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-d2fdf" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.353478 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-f75lw" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.359902 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fsnbxn"] Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.361604 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fsnbxn" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.365008 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.365276 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-fbc92" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.381019 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hskzn" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.409386 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d65lr\" (UniqueName: \"kubernetes.io/projected/cc35e433-dd6b-4cdf-9776-49106dbb9f13-kube-api-access-d65lr\") pod \"ovn-operator-controller-manager-b6456fdb6-wx44v\" (UID: \"cc35e433-dd6b-4cdf-9776-49106dbb9f13\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-wx44v" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.409611 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7r7xc\" (UniqueName: \"kubernetes.io/projected/e652bc09-301e-4200-a0be-ec79798d93b7-kube-api-access-7r7xc\") pod \"placement-operator-controller-manager-78f8948974-258f4\" (UID: \"e652bc09-301e-4200-a0be-ec79798d93b7\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-258f4" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.409816 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rt7sg\" (UniqueName: \"kubernetes.io/projected/195e298b-eaa8-4d82-a246-bf28d442d9f9-kube-api-access-rt7sg\") pod \"octavia-operator-controller-manager-998648c74-spmqj\" (UID: \"195e298b-eaa8-4d82-a246-bf28d442d9f9\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-spmqj" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.412223 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vhzf9\" (UniqueName: \"kubernetes.io/projected/a26b22b6-0795-4357-a1ff-9cbdd3b10f45-kube-api-access-vhzf9\") pod \"swift-operator-controller-manager-9d58d64bc-d2fdf\" (UID: \"a26b22b6-0795-4357-a1ff-9cbdd3b10f45\") " pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-d2fdf" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.429410 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rt7sg\" (UniqueName: \"kubernetes.io/projected/195e298b-eaa8-4d82-a246-bf28d442d9f9-kube-api-access-rt7sg\") pod \"octavia-operator-controller-manager-998648c74-spmqj\" (UID: \"195e298b-eaa8-4d82-a246-bf28d442d9f9\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-spmqj" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.449530 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-258f4"] Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.465330 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-6vmnd" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.475693 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fsnbxn"] Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.492348 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-9d58d64bc-d2fdf"] Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.513815 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm"] Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.515663 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7r7xc\" (UniqueName: \"kubernetes.io/projected/e652bc09-301e-4200-a0be-ec79798d93b7-kube-api-access-7r7xc\") pod \"placement-operator-controller-manager-78f8948974-258f4\" (UID: \"e652bc09-301e-4200-a0be-ec79798d93b7\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-258f4" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.515776 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879fsnbxn\" (UID: \"0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fsnbxn" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.515863 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vhzf9\" (UniqueName: \"kubernetes.io/projected/a26b22b6-0795-4357-a1ff-9cbdd3b10f45-kube-api-access-vhzf9\") pod \"swift-operator-controller-manager-9d58d64bc-d2fdf\" (UID: \"a26b22b6-0795-4357-a1ff-9cbdd3b10f45\") " pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-d2fdf" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.515897 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7lff7\" (UniqueName: \"kubernetes.io/projected/0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46-kube-api-access-7lff7\") pod \"openstack-baremetal-operator-controller-manager-84b575879fsnbxn\" (UID: \"0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fsnbxn" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.515967 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d65lr\" (UniqueName: \"kubernetes.io/projected/cc35e433-dd6b-4cdf-9776-49106dbb9f13-kube-api-access-d65lr\") pod \"ovn-operator-controller-manager-b6456fdb6-wx44v\" (UID: \"cc35e433-dd6b-4cdf-9776-49106dbb9f13\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-wx44v" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.516906 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.527077 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-spmqj" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.528130 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-pv5cp" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.538290 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7r7xc\" (UniqueName: \"kubernetes.io/projected/e652bc09-301e-4200-a0be-ec79798d93b7-kube-api-access-7r7xc\") pod \"placement-operator-controller-manager-78f8948974-258f4\" (UID: \"e652bc09-301e-4200-a0be-ec79798d93b7\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-258f4" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.538597 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vhzf9\" (UniqueName: \"kubernetes.io/projected/a26b22b6-0795-4357-a1ff-9cbdd3b10f45-kube-api-access-vhzf9\") pod \"swift-operator-controller-manager-9d58d64bc-d2fdf\" (UID: \"a26b22b6-0795-4357-a1ff-9cbdd3b10f45\") " pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-d2fdf" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.539827 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d65lr\" (UniqueName: \"kubernetes.io/projected/cc35e433-dd6b-4cdf-9776-49106dbb9f13-kube-api-access-d65lr\") pod \"ovn-operator-controller-manager-b6456fdb6-wx44v\" (UID: \"cc35e433-dd6b-4cdf-9776-49106dbb9f13\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-wx44v" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.548945 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-v9cg7"] Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.550765 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-v9cg7" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.553170 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-qj46k" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.570563 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm"] Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.610010 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-wx44v" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.618996 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879fsnbxn\" (UID: \"0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fsnbxn" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.619077 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lbxp5\" (UniqueName: \"kubernetes.io/projected/74434a32-0961-43af-b800-8de05830b266-kube-api-access-lbxp5\") pod \"test-operator-controller-manager-5854674fcc-v9cg7\" (UID: \"74434a32-0961-43af-b800-8de05830b266\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-v9cg7" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.619113 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7lff7\" (UniqueName: \"kubernetes.io/projected/0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46-kube-api-access-7lff7\") pod \"openstack-baremetal-operator-controller-manager-84b575879fsnbxn\" (UID: \"0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fsnbxn" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.619182 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cwqd8\" (UniqueName: \"kubernetes.io/projected/bcd8d669-4a40-401d-af99-651b840fb48b-kube-api-access-cwqd8\") pod \"telemetry-operator-controller-manager-65f6d9c768-58wmm\" (UID: \"bcd8d669-4a40-401d-af99-651b840fb48b\") " pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 21:39:05 crc kubenswrapper[4791]: E1208 21:39:05.619351 4791 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 08 21:39:05 crc kubenswrapper[4791]: E1208 21:39:05.619398 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46-cert podName:0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46 nodeName:}" failed. No retries permitted until 2025-12-08 21:39:06.119381576 +0000 UTC m=+1222.818139911 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46-cert") pod "openstack-baremetal-operator-controller-manager-84b575879fsnbxn" (UID: "0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.625196 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-258f4" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.625848 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-v9cg7"] Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.640592 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7lff7\" (UniqueName: \"kubernetes.io/projected/0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46-kube-api-access-7lff7\") pod \"openstack-baremetal-operator-controller-manager-84b575879fsnbxn\" (UID: \"0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fsnbxn" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.652578 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-667bd8d554-vszqd"] Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.655517 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-vszqd" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.660404 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-tpzsz" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.664448 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-667bd8d554-vszqd"] Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.692123 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-d2fdf" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.728975 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cwqd8\" (UniqueName: \"kubernetes.io/projected/bcd8d669-4a40-401d-af99-651b840fb48b-kube-api-access-cwqd8\") pod \"telemetry-operator-controller-manager-65f6d9c768-58wmm\" (UID: \"bcd8d669-4a40-401d-af99-651b840fb48b\") " pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.729057 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x4tpb\" (UniqueName: \"kubernetes.io/projected/6b775709-57bb-4fa2-9eb9-4785356c119c-kube-api-access-x4tpb\") pod \"watcher-operator-controller-manager-667bd8d554-vszqd\" (UID: \"6b775709-57bb-4fa2-9eb9-4785356c119c\") " pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-vszqd" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.729270 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lbxp5\" (UniqueName: \"kubernetes.io/projected/74434a32-0961-43af-b800-8de05830b266-kube-api-access-lbxp5\") pod \"test-operator-controller-manager-5854674fcc-v9cg7\" (UID: \"74434a32-0961-43af-b800-8de05830b266\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-v9cg7" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.752060 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-54c84cffdd-2cvxn"] Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.753726 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-2cvxn" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.759776 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-dp4pb" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.759782 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.759872 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.773933 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lbxp5\" (UniqueName: \"kubernetes.io/projected/74434a32-0961-43af-b800-8de05830b266-kube-api-access-lbxp5\") pod \"test-operator-controller-manager-5854674fcc-v9cg7\" (UID: \"74434a32-0961-43af-b800-8de05830b266\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-v9cg7" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.788033 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-54c84cffdd-2cvxn"] Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.817835 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-fsp7x"] Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.819108 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-fsp7x" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.821892 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cwqd8\" (UniqueName: \"kubernetes.io/projected/bcd8d669-4a40-401d-af99-651b840fb48b-kube-api-access-cwqd8\") pod \"telemetry-operator-controller-manager-65f6d9c768-58wmm\" (UID: \"bcd8d669-4a40-401d-af99-651b840fb48b\") " pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.822898 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-xfxn9" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.831784 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9v2zb\" (UniqueName: \"kubernetes.io/projected/e6338e82-465c-4bbd-862a-5835f329caad-kube-api-access-9v2zb\") pod \"openstack-operator-controller-manager-54c84cffdd-2cvxn\" (UID: \"e6338e82-465c-4bbd-862a-5835f329caad\") " pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-2cvxn" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.832002 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e6338e82-465c-4bbd-862a-5835f329caad-webhook-certs\") pod \"openstack-operator-controller-manager-54c84cffdd-2cvxn\" (UID: \"e6338e82-465c-4bbd-862a-5835f329caad\") " pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-2cvxn" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.832116 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: 
\"kubernetes.io/secret/e6338e82-465c-4bbd-862a-5835f329caad-metrics-certs\") pod \"openstack-operator-controller-manager-54c84cffdd-2cvxn\" (UID: \"e6338e82-465c-4bbd-862a-5835f329caad\") " pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-2cvxn" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.832210 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x4tpb\" (UniqueName: \"kubernetes.io/projected/6b775709-57bb-4fa2-9eb9-4785356c119c-kube-api-access-x4tpb\") pod \"watcher-operator-controller-manager-667bd8d554-vszqd\" (UID: \"6b775709-57bb-4fa2-9eb9-4785356c119c\") " pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-vszqd" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.860288 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x4tpb\" (UniqueName: \"kubernetes.io/projected/6b775709-57bb-4fa2-9eb9-4785356c119c-kube-api-access-x4tpb\") pod \"watcher-operator-controller-manager-667bd8d554-vszqd\" (UID: \"6b775709-57bb-4fa2-9eb9-4785356c119c\") " pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-vszqd" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.879813 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-vszqd" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.883846 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.894617 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-fsp7x"] Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.900646 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-v9cg7" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.917146 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-9dcqh"] Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.933904 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gdcck\" (UniqueName: \"kubernetes.io/projected/48279dfc-ae82-45f8-ba4e-3906c0b1cefa-kube-api-access-gdcck\") pod \"rabbitmq-cluster-operator-manager-668c99d594-fsp7x\" (UID: \"48279dfc-ae82-45f8-ba4e-3906c0b1cefa\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-fsp7x" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.934171 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e6338e82-465c-4bbd-862a-5835f329caad-metrics-certs\") pod \"openstack-operator-controller-manager-54c84cffdd-2cvxn\" (UID: \"e6338e82-465c-4bbd-862a-5835f329caad\") " pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-2cvxn" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.934410 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9v2zb\" (UniqueName: \"kubernetes.io/projected/e6338e82-465c-4bbd-862a-5835f329caad-kube-api-access-9v2zb\") pod \"openstack-operator-controller-manager-54c84cffdd-2cvxn\" (UID: \"e6338e82-465c-4bbd-862a-5835f329caad\") " pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-2cvxn" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.934576 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e6338e82-465c-4bbd-862a-5835f329caad-webhook-certs\") pod \"openstack-operator-controller-manager-54c84cffdd-2cvxn\" (UID: \"e6338e82-465c-4bbd-862a-5835f329caad\") " pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-2cvxn" Dec 08 21:39:05 crc kubenswrapper[4791]: E1208 21:39:05.934852 4791 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 08 21:39:05 crc kubenswrapper[4791]: E1208 21:39:05.935013 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e6338e82-465c-4bbd-862a-5835f329caad-metrics-certs podName:e6338e82-465c-4bbd-862a-5835f329caad nodeName:}" failed. No retries permitted until 2025-12-08 21:39:06.434997489 +0000 UTC m=+1223.133755834 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/e6338e82-465c-4bbd-862a-5835f329caad-metrics-certs") pod "openstack-operator-controller-manager-54c84cffdd-2cvxn" (UID: "e6338e82-465c-4bbd-862a-5835f329caad") : secret "metrics-server-cert" not found Dec 08 21:39:05 crc kubenswrapper[4791]: E1208 21:39:05.935341 4791 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 08 21:39:05 crc kubenswrapper[4791]: E1208 21:39:05.935441 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e6338e82-465c-4bbd-862a-5835f329caad-webhook-certs podName:e6338e82-465c-4bbd-862a-5835f329caad nodeName:}" failed. No retries permitted until 2025-12-08 21:39:06.4354321 +0000 UTC m=+1223.134190445 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/e6338e82-465c-4bbd-862a-5835f329caad-webhook-certs") pod "openstack-operator-controller-manager-54c84cffdd-2cvxn" (UID: "e6338e82-465c-4bbd-862a-5835f329caad") : secret "webhook-server-cert" not found Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.940173 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-4d6px" event={"ID":"684d1010-fc58-4789-b8f6-ebe783ec15fe","Type":"ContainerStarted","Data":"d02e1971439c372798b294c532535bc0e739285607eeb7a056c7df9173a715f1"} Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.942413 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-zn5rk" event={"ID":"45ebd174-c21e-4fb5-ae01-cf6b3d5e7079","Type":"ContainerStarted","Data":"1963ccacb7301a92c59d18428711fe0c4a0bd3c9087bd4006bb4e2f22ecf1208"} Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.945006 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-9dcqh" event={"ID":"c3e8c89c-91da-44c4-95ec-20b5d543eca1","Type":"ContainerStarted","Data":"9fcae7441ef52022a79c6b58f41e20273c7051535d16e8ab2f06b10aac7029bd"} Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.964697 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-6c677c69b-4d6px"] Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.966462 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9v2zb\" (UniqueName: \"kubernetes.io/projected/e6338e82-465c-4bbd-862a-5835f329caad-kube-api-access-9v2zb\") pod \"openstack-operator-controller-manager-54c84cffdd-2cvxn\" (UID: \"e6338e82-465c-4bbd-862a-5835f329caad\") " pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-2cvxn" Dec 08 21:39:05 crc kubenswrapper[4791]: I1208 21:39:05.992946 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-697fb699cf-zn5rk"] Dec 08 21:39:06 crc kubenswrapper[4791]: I1208 21:39:06.036335 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gdcck\" (UniqueName: \"kubernetes.io/projected/48279dfc-ae82-45f8-ba4e-3906c0b1cefa-kube-api-access-gdcck\") pod \"rabbitmq-cluster-operator-manager-668c99d594-fsp7x\" (UID: \"48279dfc-ae82-45f8-ba4e-3906c0b1cefa\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-fsp7x" Dec 08 21:39:06 crc kubenswrapper[4791]: I1208 21:39:06.054252 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gdcck\" (UniqueName: \"kubernetes.io/projected/48279dfc-ae82-45f8-ba4e-3906c0b1cefa-kube-api-access-gdcck\") pod \"rabbitmq-cluster-operator-manager-668c99d594-fsp7x\" (UID: \"48279dfc-ae82-45f8-ba4e-3906c0b1cefa\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-fsp7x" Dec 08 21:39:06 crc kubenswrapper[4791]: I1208 21:39:06.137901 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879fsnbxn\" (UID: \"0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fsnbxn" Dec 08 
21:39:06 crc kubenswrapper[4791]: E1208 21:39:06.138064 4791 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 08 21:39:06 crc kubenswrapper[4791]: E1208 21:39:06.138146 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46-cert podName:0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46 nodeName:}" failed. No retries permitted until 2025-12-08 21:39:07.138127767 +0000 UTC m=+1223.836886112 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46-cert") pod "openstack-baremetal-operator-controller-manager-84b575879fsnbxn" (UID: "0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 08 21:39:06 crc kubenswrapper[4791]: I1208 21:39:06.273551 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-fsp7x" Dec 08 21:39:06 crc kubenswrapper[4791]: W1208 21:39:06.321481 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod06e9548e_f7f7_4d48_a10a_06de61005b07.slice/crio-e2a082d2e6258fef74ff48797b308b77efb702ec824ba77e5fa3fdd55df436cc WatchSource:0}: Error finding container e2a082d2e6258fef74ff48797b308b77efb702ec824ba77e5fa3fdd55df436cc: Status 404 returned error can't find the container with id e2a082d2e6258fef74ff48797b308b77efb702ec824ba77e5fa3fdd55df436cc Dec 08 21:39:06 crc kubenswrapper[4791]: I1208 21:39:06.323573 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-fsm5r"] Dec 08 21:39:06 crc kubenswrapper[4791]: I1208 21:39:06.343522 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b6c24020-d177-4816-ac96-7f97f8f243a1-cert\") pod \"infra-operator-controller-manager-78d48bff9d-6fqwg\" (UID: \"b6c24020-d177-4816-ac96-7f97f8f243a1\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-6fqwg" Dec 08 21:39:06 crc kubenswrapper[4791]: E1208 21:39:06.343687 4791 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 08 21:39:06 crc kubenswrapper[4791]: E1208 21:39:06.343790 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b6c24020-d177-4816-ac96-7f97f8f243a1-cert podName:b6c24020-d177-4816-ac96-7f97f8f243a1 nodeName:}" failed. No retries permitted until 2025-12-08 21:39:08.343755216 +0000 UTC m=+1225.042513561 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/b6c24020-d177-4816-ac96-7f97f8f243a1-cert") pod "infra-operator-controller-manager-78d48bff9d-6fqwg" (UID: "b6c24020-d177-4816-ac96-7f97f8f243a1") : secret "infra-operator-webhook-server-cert" not found Dec 08 21:39:06 crc kubenswrapper[4791]: I1208 21:39:06.348900 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-5697bb5779-rzq2w"] Dec 08 21:39:06 crc kubenswrapper[4791]: I1208 21:39:06.391093 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-4mwvh"] Dec 08 21:39:06 crc kubenswrapper[4791]: I1208 21:39:06.445568 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e6338e82-465c-4bbd-862a-5835f329caad-webhook-certs\") pod \"openstack-operator-controller-manager-54c84cffdd-2cvxn\" (UID: \"e6338e82-465c-4bbd-862a-5835f329caad\") " pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-2cvxn" Dec 08 21:39:06 crc kubenswrapper[4791]: I1208 21:39:06.445988 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e6338e82-465c-4bbd-862a-5835f329caad-metrics-certs\") pod \"openstack-operator-controller-manager-54c84cffdd-2cvxn\" (UID: \"e6338e82-465c-4bbd-862a-5835f329caad\") " pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-2cvxn" Dec 08 21:39:06 crc kubenswrapper[4791]: E1208 21:39:06.445769 4791 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 08 21:39:06 crc kubenswrapper[4791]: E1208 21:39:06.446127 4791 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 08 21:39:06 crc kubenswrapper[4791]: E1208 21:39:06.446179 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e6338e82-465c-4bbd-862a-5835f329caad-webhook-certs podName:e6338e82-465c-4bbd-862a-5835f329caad nodeName:}" failed. No retries permitted until 2025-12-08 21:39:07.446047618 +0000 UTC m=+1224.144805963 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/e6338e82-465c-4bbd-862a-5835f329caad-webhook-certs") pod "openstack-operator-controller-manager-54c84cffdd-2cvxn" (UID: "e6338e82-465c-4bbd-862a-5835f329caad") : secret "webhook-server-cert" not found Dec 08 21:39:06 crc kubenswrapper[4791]: E1208 21:39:06.446274 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e6338e82-465c-4bbd-862a-5835f329caad-metrics-certs podName:e6338e82-465c-4bbd-862a-5835f329caad nodeName:}" failed. No retries permitted until 2025-12-08 21:39:07.446265634 +0000 UTC m=+1224.145023979 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/e6338e82-465c-4bbd-862a-5835f329caad-metrics-certs") pod "openstack-operator-controller-manager-54c84cffdd-2cvxn" (UID: "e6338e82-465c-4bbd-862a-5835f329caad") : secret "metrics-server-cert" not found Dec 08 21:39:06 crc kubenswrapper[4791]: I1208 21:39:06.464866 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-5b5fd79c9c-ddnts"] Dec 08 21:39:06 crc kubenswrapper[4791]: I1208 21:39:06.479832 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-79c8c4686c-nlvf4"] Dec 08 21:39:06 crc kubenswrapper[4791]: W1208 21:39:06.489000 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod301abb1d_1139_4636_805b_c6458568fe7e.slice/crio-ee80a140f5221b79e3e4f163e0a8a6fdf44ecdf2d1f8fa6811c280a1f87bf10e WatchSource:0}: Error finding container ee80a140f5221b79e3e4f163e0a8a6fdf44ecdf2d1f8fa6811c280a1f87bf10e: Status 404 returned error can't find the container with id ee80a140f5221b79e3e4f163e0a8a6fdf44ecdf2d1f8fa6811c280a1f87bf10e Dec 08 21:39:06 crc kubenswrapper[4791]: W1208 21:39:06.490865 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod55e85e76_95f6_46ce_906a_26ce559775bc.slice/crio-c5a450a25c74a7ad9acf2fa0eb13182e766c27f6abd3be118c6f909ad30189b5 WatchSource:0}: Error finding container c5a450a25c74a7ad9acf2fa0eb13182e766c27f6abd3be118c6f909ad30189b5: Status 404 returned error can't find the container with id c5a450a25c74a7ad9acf2fa0eb13182e766c27f6abd3be118c6f909ad30189b5 Dec 08 21:39:06 crc kubenswrapper[4791]: I1208 21:39:06.493083 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-5tzxv"] Dec 08 21:39:06 crc kubenswrapper[4791]: I1208 21:39:06.956971 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-nlvf4" event={"ID":"4d6f8fd6-c9d1-483e-8cc6-29d2d53f1235","Type":"ContainerStarted","Data":"037be1b3706507a7c4a173aba818d7fcf8f07e50b34a8489dc88904af68e9a01"} Dec 08 21:39:06 crc kubenswrapper[4791]: I1208 21:39:06.958787 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-5tzxv" event={"ID":"55e85e76-95f6-46ce-906a-26ce559775bc","Type":"ContainerStarted","Data":"c5a450a25c74a7ad9acf2fa0eb13182e766c27f6abd3be118c6f909ad30189b5"} Dec 08 21:39:06 crc kubenswrapper[4791]: I1208 21:39:06.960364 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-4mwvh" event={"ID":"253be35d-aa0c-417b-8dc8-7ef23f63ce45","Type":"ContainerStarted","Data":"504eb2f623ecd4995a88c99b699f348fbe2a057b22f9088f49e1680959ccb72d"} Dec 08 21:39:06 crc kubenswrapper[4791]: I1208 21:39:06.970192 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-ddnts" event={"ID":"301abb1d-1139-4636-805b-c6458568fe7e","Type":"ContainerStarted","Data":"ee80a140f5221b79e3e4f163e0a8a6fdf44ecdf2d1f8fa6811c280a1f87bf10e"} Dec 08 21:39:06 crc kubenswrapper[4791]: I1208 21:39:06.973426 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/glance-operator-controller-manager-5697bb5779-rzq2w" event={"ID":"430711b5-aa60-4462-a730-242ecb914d6c","Type":"ContainerStarted","Data":"b8ecb3c4a1e964354b39d109d540cdf061b457801b7e2b23f2a8f8515e8c50f5"} Dec 08 21:39:06 crc kubenswrapper[4791]: I1208 21:39:06.977612 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-fsm5r" event={"ID":"06e9548e-f7f7-4d48-a10a-06de61005b07","Type":"ContainerStarted","Data":"e2a082d2e6258fef74ff48797b308b77efb702ec824ba77e5fa3fdd55df436cc"} Dec 08 21:39:07 crc kubenswrapper[4791]: I1208 21:39:07.162975 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879fsnbxn\" (UID: \"0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fsnbxn" Dec 08 21:39:07 crc kubenswrapper[4791]: E1208 21:39:07.163133 4791 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 08 21:39:07 crc kubenswrapper[4791]: E1208 21:39:07.163306 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46-cert podName:0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46 nodeName:}" failed. No retries permitted until 2025-12-08 21:39:09.163287911 +0000 UTC m=+1225.862046256 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46-cert") pod "openstack-baremetal-operator-controller-manager-84b575879fsnbxn" (UID: "0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 08 21:39:07 crc kubenswrapper[4791]: I1208 21:39:07.349820 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-258f4"] Dec 08 21:39:07 crc kubenswrapper[4791]: I1208 21:39:07.355784 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-667bd8d554-vszqd"] Dec 08 21:39:07 crc kubenswrapper[4791]: I1208 21:39:07.373359 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-spmqj"] Dec 08 21:39:07 crc kubenswrapper[4791]: I1208 21:39:07.447616 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm"] Dec 08 21:39:07 crc kubenswrapper[4791]: I1208 21:39:07.473082 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e6338e82-465c-4bbd-862a-5835f329caad-webhook-certs\") pod \"openstack-operator-controller-manager-54c84cffdd-2cvxn\" (UID: \"e6338e82-465c-4bbd-862a-5835f329caad\") " pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-2cvxn" Dec 08 21:39:07 crc kubenswrapper[4791]: I1208 21:39:07.473336 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e6338e82-465c-4bbd-862a-5835f329caad-metrics-certs\") pod \"openstack-operator-controller-manager-54c84cffdd-2cvxn\" (UID: \"e6338e82-465c-4bbd-862a-5835f329caad\") " 
pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-2cvxn" Dec 08 21:39:07 crc kubenswrapper[4791]: E1208 21:39:07.473631 4791 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 08 21:39:07 crc kubenswrapper[4791]: E1208 21:39:07.473774 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e6338e82-465c-4bbd-862a-5835f329caad-metrics-certs podName:e6338e82-465c-4bbd-862a-5835f329caad nodeName:}" failed. No retries permitted until 2025-12-08 21:39:09.473750056 +0000 UTC m=+1226.172508401 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/e6338e82-465c-4bbd-862a-5835f329caad-metrics-certs") pod "openstack-operator-controller-manager-54c84cffdd-2cvxn" (UID: "e6338e82-465c-4bbd-862a-5835f329caad") : secret "metrics-server-cert" not found Dec 08 21:39:07 crc kubenswrapper[4791]: E1208 21:39:07.475742 4791 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 08 21:39:07 crc kubenswrapper[4791]: E1208 21:39:07.475836 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e6338e82-465c-4bbd-862a-5835f329caad-webhook-certs podName:e6338e82-465c-4bbd-862a-5835f329caad nodeName:}" failed. No retries permitted until 2025-12-08 21:39:09.475811447 +0000 UTC m=+1226.174569792 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/e6338e82-465c-4bbd-862a-5835f329caad-webhook-certs") pod "openstack-operator-controller-manager-54c84cffdd-2cvxn" (UID: "e6338e82-465c-4bbd-862a-5835f329caad") : secret "webhook-server-cert" not found Dec 08 21:39:07 crc kubenswrapper[4791]: I1208 21:39:07.478407 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-967d97867-s5pvq"] Dec 08 21:39:07 crc kubenswrapper[4791]: W1208 21:39:07.484064 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod74434a32_0961_43af_b800_8de05830b266.slice/crio-a1781f8c01bb1a5fb3b04d12f74be29c20ca9a1c2c9f285df28df1ac52ec73ec WatchSource:0}: Error finding container a1781f8c01bb1a5fb3b04d12f74be29c20ca9a1c2c9f285df28df1ac52ec73ec: Status 404 returned error can't find the container with id a1781f8c01bb1a5fb3b04d12f74be29c20ca9a1c2c9f285df28df1ac52ec73ec Dec 08 21:39:07 crc kubenswrapper[4791]: E1208 21:39:07.496861 4791 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-d65lr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-b6456fdb6-wx44v_openstack-operators(cc35e433-dd6b-4cdf-9776-49106dbb9f13): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 08 21:39:07 crc kubenswrapper[4791]: E1208 21:39:07.504392 4791 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-d65lr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-b6456fdb6-wx44v_openstack-operators(cc35e433-dd6b-4cdf-9776-49106dbb9f13): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 08 21:39:07 crc kubenswrapper[4791]: E1208 21:39:07.505466 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS 
exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-wx44v" podUID="cc35e433-dd6b-4cdf-9776-49106dbb9f13" Dec 08 21:39:07 crc kubenswrapper[4791]: E1208 21:39:07.511587 4791 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-gdcck,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-fsp7x_openstack-operators(48279dfc-ae82-45f8-ba4e-3906c0b1cefa): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 08 21:39:07 crc kubenswrapper[4791]: I1208 21:39:07.513356 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-9d58d64bc-d2fdf"] Dec 08 21:39:07 crc kubenswrapper[4791]: E1208 21:39:07.513605 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-fsp7x" podUID="48279dfc-ae82-45f8-ba4e-3906c0b1cefa" Dec 08 21:39:07 crc kubenswrapper[4791]: I1208 21:39:07.526616 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hskzn"] Dec 08 21:39:07 crc kubenswrapper[4791]: I1208 21:39:07.544321 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-6vmnd"] Dec 08 21:39:07 crc kubenswrapper[4791]: I1208 21:39:07.555140 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-v9cg7"] Dec 08 21:39:07 crc 
kubenswrapper[4791]: I1208 21:39:07.563628 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-wx44v"] Dec 08 21:39:07 crc kubenswrapper[4791]: I1208 21:39:07.574772 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-fsp7x"] Dec 08 21:39:08 crc kubenswrapper[4791]: I1208 21:39:08.007010 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-fsp7x" event={"ID":"48279dfc-ae82-45f8-ba4e-3906c0b1cefa","Type":"ContainerStarted","Data":"d27d3c2f244e70380bdaf4f8604db2b7023b18f03146e6d08c2b18c6e3a4e809"} Dec 08 21:39:08 crc kubenswrapper[4791]: I1208 21:39:08.009532 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-spmqj" event={"ID":"195e298b-eaa8-4d82-a246-bf28d442d9f9","Type":"ContainerStarted","Data":"205513fa9943d11dbe7ba5e6cb296c90e282d7e3e07b0f90522a4cc93e3c3f53"} Dec 08 21:39:08 crc kubenswrapper[4791]: E1208 21:39:08.009778 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-fsp7x" podUID="48279dfc-ae82-45f8-ba4e-3906c0b1cefa" Dec 08 21:39:08 crc kubenswrapper[4791]: I1208 21:39:08.018589 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hskzn" event={"ID":"ac0b7209-48f2-4080-bd26-86462503772b","Type":"ContainerStarted","Data":"0d16d9d132e02d88c43eb07a2f7eecd020722e3ccf254eb3311a5fcb4a432938"} Dec 08 21:39:08 crc kubenswrapper[4791]: I1208 21:39:08.022795 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-967d97867-s5pvq" event={"ID":"23d7f321-494b-46fa-890a-6cb7f47fdb49","Type":"ContainerStarted","Data":"fe4f7bf7d9d4092c8d63b46d716bd5166c6fe62e8e7764cc51f236ddd3b67538"} Dec 08 21:39:08 crc kubenswrapper[4791]: I1208 21:39:08.024844 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-d2fdf" event={"ID":"a26b22b6-0795-4357-a1ff-9cbdd3b10f45","Type":"ContainerStarted","Data":"8f6a3fbec58e234e5ece9d2e4782debc0a07a06f5d9d9ac95ec34d995b02a780"} Dec 08 21:39:08 crc kubenswrapper[4791]: I1208 21:39:08.026702 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-vszqd" event={"ID":"6b775709-57bb-4fa2-9eb9-4785356c119c","Type":"ContainerStarted","Data":"fcd66c5141358a0fd51225673c3aa68ec82a434495e41769106e85ffe11e1251"} Dec 08 21:39:08 crc kubenswrapper[4791]: I1208 21:39:08.028378 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-v9cg7" event={"ID":"74434a32-0961-43af-b800-8de05830b266","Type":"ContainerStarted","Data":"a1781f8c01bb1a5fb3b04d12f74be29c20ca9a1c2c9f285df28df1ac52ec73ec"} Dec 08 21:39:08 crc kubenswrapper[4791]: I1208 21:39:08.030150 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-wx44v" 
event={"ID":"cc35e433-dd6b-4cdf-9776-49106dbb9f13","Type":"ContainerStarted","Data":"d46b84bbc2fc6b77c61b480b1e96925e0d6e59b85200a08d44efd39cd5498b19"} Dec 08 21:39:08 crc kubenswrapper[4791]: I1208 21:39:08.031802 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-6vmnd" event={"ID":"8714efaf-0a6b-46ba-aadb-2fef8f7f1a32","Type":"ContainerStarted","Data":"924c00bb24460b76093278e4991c735326769950848857857d3d7ceda1c643c1"} Dec 08 21:39:08 crc kubenswrapper[4791]: E1208 21:39:08.034730 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-wx44v" podUID="cc35e433-dd6b-4cdf-9776-49106dbb9f13" Dec 08 21:39:08 crc kubenswrapper[4791]: I1208 21:39:08.038415 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-258f4" event={"ID":"e652bc09-301e-4200-a0be-ec79798d93b7","Type":"ContainerStarted","Data":"5326cd40d0ccfaec7e22928e63bfe822f5dc1098341a93c4ef455ddd1b36bd10"} Dec 08 21:39:08 crc kubenswrapper[4791]: I1208 21:39:08.039629 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" event={"ID":"bcd8d669-4a40-401d-af99-651b840fb48b","Type":"ContainerStarted","Data":"747a3bcb5b248bd98171c55dc581f18b2c6f39ba83f6baae842ff9ae7df3f3c4"} Dec 08 21:39:08 crc kubenswrapper[4791]: I1208 21:39:08.391551 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b6c24020-d177-4816-ac96-7f97f8f243a1-cert\") pod \"infra-operator-controller-manager-78d48bff9d-6fqwg\" (UID: \"b6c24020-d177-4816-ac96-7f97f8f243a1\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-6fqwg" Dec 08 21:39:08 crc kubenswrapper[4791]: E1208 21:39:08.391797 4791 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 08 21:39:08 crc kubenswrapper[4791]: E1208 21:39:08.391921 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b6c24020-d177-4816-ac96-7f97f8f243a1-cert podName:b6c24020-d177-4816-ac96-7f97f8f243a1 nodeName:}" failed. No retries permitted until 2025-12-08 21:39:12.391886609 +0000 UTC m=+1229.090644954 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/b6c24020-d177-4816-ac96-7f97f8f243a1-cert") pod "infra-operator-controller-manager-78d48bff9d-6fqwg" (UID: "b6c24020-d177-4816-ac96-7f97f8f243a1") : secret "infra-operator-webhook-server-cert" not found Dec 08 21:39:09 crc kubenswrapper[4791]: E1208 21:39:09.051150 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-wx44v" podUID="cc35e433-dd6b-4cdf-9776-49106dbb9f13" Dec 08 21:39:09 crc kubenswrapper[4791]: E1208 21:39:09.052346 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-fsp7x" podUID="48279dfc-ae82-45f8-ba4e-3906c0b1cefa" Dec 08 21:39:09 crc kubenswrapper[4791]: I1208 21:39:09.213246 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879fsnbxn\" (UID: \"0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fsnbxn" Dec 08 21:39:09 crc kubenswrapper[4791]: E1208 21:39:09.213505 4791 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 08 21:39:09 crc kubenswrapper[4791]: E1208 21:39:09.213603 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46-cert podName:0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46 nodeName:}" failed. No retries permitted until 2025-12-08 21:39:13.213579858 +0000 UTC m=+1229.912338273 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46-cert") pod "openstack-baremetal-operator-controller-manager-84b575879fsnbxn" (UID: "0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 08 21:39:09 crc kubenswrapper[4791]: I1208 21:39:09.519314 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e6338e82-465c-4bbd-862a-5835f329caad-webhook-certs\") pod \"openstack-operator-controller-manager-54c84cffdd-2cvxn\" (UID: \"e6338e82-465c-4bbd-862a-5835f329caad\") " pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-2cvxn" Dec 08 21:39:09 crc kubenswrapper[4791]: I1208 21:39:09.519427 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e6338e82-465c-4bbd-862a-5835f329caad-metrics-certs\") pod \"openstack-operator-controller-manager-54c84cffdd-2cvxn\" (UID: \"e6338e82-465c-4bbd-862a-5835f329caad\") " pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-2cvxn" Dec 08 21:39:09 crc kubenswrapper[4791]: E1208 21:39:09.519569 4791 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 08 21:39:09 crc kubenswrapper[4791]: E1208 21:39:09.519618 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e6338e82-465c-4bbd-862a-5835f329caad-metrics-certs podName:e6338e82-465c-4bbd-862a-5835f329caad nodeName:}" failed. No retries permitted until 2025-12-08 21:39:13.519603022 +0000 UTC m=+1230.218361367 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/e6338e82-465c-4bbd-862a-5835f329caad-metrics-certs") pod "openstack-operator-controller-manager-54c84cffdd-2cvxn" (UID: "e6338e82-465c-4bbd-862a-5835f329caad") : secret "metrics-server-cert" not found Dec 08 21:39:09 crc kubenswrapper[4791]: E1208 21:39:09.519922 4791 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 08 21:39:09 crc kubenswrapper[4791]: E1208 21:39:09.520009 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e6338e82-465c-4bbd-862a-5835f329caad-webhook-certs podName:e6338e82-465c-4bbd-862a-5835f329caad nodeName:}" failed. No retries permitted until 2025-12-08 21:39:13.519985972 +0000 UTC m=+1230.218744397 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/e6338e82-465c-4bbd-862a-5835f329caad-webhook-certs") pod "openstack-operator-controller-manager-54c84cffdd-2cvxn" (UID: "e6338e82-465c-4bbd-862a-5835f329caad") : secret "webhook-server-cert" not found Dec 08 21:39:12 crc kubenswrapper[4791]: I1208 21:39:12.483182 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b6c24020-d177-4816-ac96-7f97f8f243a1-cert\") pod \"infra-operator-controller-manager-78d48bff9d-6fqwg\" (UID: \"b6c24020-d177-4816-ac96-7f97f8f243a1\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-6fqwg" Dec 08 21:39:12 crc kubenswrapper[4791]: E1208 21:39:12.483411 4791 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 08 21:39:12 crc kubenswrapper[4791]: E1208 21:39:12.483664 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b6c24020-d177-4816-ac96-7f97f8f243a1-cert podName:b6c24020-d177-4816-ac96-7f97f8f243a1 nodeName:}" failed. No retries permitted until 2025-12-08 21:39:20.483636764 +0000 UTC m=+1237.182395169 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/b6c24020-d177-4816-ac96-7f97f8f243a1-cert") pod "infra-operator-controller-manager-78d48bff9d-6fqwg" (UID: "b6c24020-d177-4816-ac96-7f97f8f243a1") : secret "infra-operator-webhook-server-cert" not found Dec 08 21:39:13 crc kubenswrapper[4791]: I1208 21:39:13.300679 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879fsnbxn\" (UID: \"0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fsnbxn" Dec 08 21:39:13 crc kubenswrapper[4791]: E1208 21:39:13.300856 4791 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 08 21:39:13 crc kubenswrapper[4791]: E1208 21:39:13.301155 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46-cert podName:0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46 nodeName:}" failed. No retries permitted until 2025-12-08 21:39:21.301141928 +0000 UTC m=+1237.999900273 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46-cert") pod "openstack-baremetal-operator-controller-manager-84b575879fsnbxn" (UID: "0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 08 21:39:13 crc kubenswrapper[4791]: I1208 21:39:13.607439 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e6338e82-465c-4bbd-862a-5835f329caad-metrics-certs\") pod \"openstack-operator-controller-manager-54c84cffdd-2cvxn\" (UID: \"e6338e82-465c-4bbd-862a-5835f329caad\") " pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-2cvxn" Dec 08 21:39:13 crc kubenswrapper[4791]: I1208 21:39:13.607612 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e6338e82-465c-4bbd-862a-5835f329caad-webhook-certs\") pod \"openstack-operator-controller-manager-54c84cffdd-2cvxn\" (UID: \"e6338e82-465c-4bbd-862a-5835f329caad\") " pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-2cvxn" Dec 08 21:39:13 crc kubenswrapper[4791]: E1208 21:39:13.607745 4791 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 08 21:39:13 crc kubenswrapper[4791]: E1208 21:39:13.607798 4791 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 08 21:39:13 crc kubenswrapper[4791]: E1208 21:39:13.607837 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e6338e82-465c-4bbd-862a-5835f329caad-metrics-certs podName:e6338e82-465c-4bbd-862a-5835f329caad nodeName:}" failed. No retries permitted until 2025-12-08 21:39:21.607819019 +0000 UTC m=+1238.306577364 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/e6338e82-465c-4bbd-862a-5835f329caad-metrics-certs") pod "openstack-operator-controller-manager-54c84cffdd-2cvxn" (UID: "e6338e82-465c-4bbd-862a-5835f329caad") : secret "metrics-server-cert" not found Dec 08 21:39:13 crc kubenswrapper[4791]: E1208 21:39:13.607853 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e6338e82-465c-4bbd-862a-5835f329caad-webhook-certs podName:e6338e82-465c-4bbd-862a-5835f329caad nodeName:}" failed. No retries permitted until 2025-12-08 21:39:21.607847519 +0000 UTC m=+1238.306605864 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/e6338e82-465c-4bbd-862a-5835f329caad-webhook-certs") pod "openstack-operator-controller-manager-54c84cffdd-2cvxn" (UID: "e6338e82-465c-4bbd-862a-5835f329caad") : secret "webhook-server-cert" not found Dec 08 21:39:14 crc kubenswrapper[4791]: I1208 21:39:14.736203 4791 patch_prober.go:28] interesting pod/metrics-server-f799f76d7-kg8mn container/metrics-server namespace/openshift-monitoring: Readiness probe status=failure output="Get \"https://10.217.0.77:10250/livez\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 08 21:39:14 crc kubenswrapper[4791]: I1208 21:39:14.736204 4791 patch_prober.go:28] interesting pod/metrics-server-f799f76d7-kg8mn container/metrics-server namespace/openshift-monitoring: Liveness probe status=failure output="Get \"https://10.217.0.77:10250/livez\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 08 21:39:14 crc kubenswrapper[4791]: I1208 21:39:14.736276 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-monitoring/metrics-server-f799f76d7-kg8mn" podUID="8fb0db65-5bab-450b-a32b-eb6f13028d4b" containerName="metrics-server" probeResult="failure" output="Get \"https://10.217.0.77:10250/livez\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 08 21:39:14 crc kubenswrapper[4791]: I1208 21:39:14.736332 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-monitoring/metrics-server-f799f76d7-kg8mn" podUID="8fb0db65-5bab-450b-a32b-eb6f13028d4b" containerName="metrics-server" probeResult="failure" output="Get \"https://10.217.0.77:10250/livez\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 08 21:39:18 crc kubenswrapper[4791]: E1208 21:39:18.650727 4791 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/cinder-operator@sha256:981b6a8f95934a86c5f10ef6e198b07265aeba7f11cf84b9ccd13dfaf06f3ca3" Dec 08 21:39:18 crc kubenswrapper[4791]: E1208 21:39:18.651449 4791 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/cinder-operator@sha256:981b6a8f95934a86c5f10ef6e198b07265aeba7f11cf84b9ccd13dfaf06f3ca3,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-6xh5l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-operator-controller-manager-6c677c69b-4d6px_openstack-operators(684d1010-fc58-4789-b8f6-ebe783ec15fe): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 08 21:39:20 crc kubenswrapper[4791]: I1208 21:39:20.551348 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b6c24020-d177-4816-ac96-7f97f8f243a1-cert\") pod \"infra-operator-controller-manager-78d48bff9d-6fqwg\" (UID: \"b6c24020-d177-4816-ac96-7f97f8f243a1\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-6fqwg" Dec 08 21:39:20 crc kubenswrapper[4791]: I1208 21:39:20.565621 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b6c24020-d177-4816-ac96-7f97f8f243a1-cert\") pod \"infra-operator-controller-manager-78d48bff9d-6fqwg\" (UID: \"b6c24020-d177-4816-ac96-7f97f8f243a1\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-6fqwg" Dec 08 21:39:20 crc kubenswrapper[4791]: E1208 21:39:20.787952 4791 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = reading blob sha256:7d6ca59745ac48971cbc2d72b53fe413144fa5c0c21f2ef1d7aaf1291851e501: Get \"https://quay.io/v2/openstack-k8s-operators/keystone-operator/blobs/sha256:7d6ca59745ac48971cbc2d72b53fe413144fa5c0c21f2ef1d7aaf1291851e501\": context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7" Dec 08 21:39:20 crc kubenswrapper[4791]: E1208 21:39:20.788443 4791 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} 
BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jwtrt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-7765d96ddf-5tzxv_openstack-operators(55e85e76-95f6-46ce-906a-26ce559775bc): ErrImagePull: rpc error: code = Canceled desc = reading blob sha256:7d6ca59745ac48971cbc2d72b53fe413144fa5c0c21f2ef1d7aaf1291851e501: Get \"https://quay.io/v2/openstack-k8s-operators/keystone-operator/blobs/sha256:7d6ca59745ac48971cbc2d72b53fe413144fa5c0c21f2ef1d7aaf1291851e501\": context canceled" logger="UnhandledError" Dec 08 21:39:20 crc kubenswrapper[4791]: I1208 21:39:20.797645 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-6fqwg" Dec 08 21:39:21 crc kubenswrapper[4791]: I1208 21:39:21.366815 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879fsnbxn\" (UID: \"0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fsnbxn" Dec 08 21:39:21 crc kubenswrapper[4791]: I1208 21:39:21.370928 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879fsnbxn\" (UID: \"0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fsnbxn" Dec 08 21:39:21 crc kubenswrapper[4791]: E1208 21:39:21.549975 4791 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/glance-operator@sha256:5370dc4a8e776923eec00bb50cbdb2e390e9dde50be26bdc04a216bd2d6b5027" Dec 08 21:39:21 crc kubenswrapper[4791]: E1208 21:39:21.550219 4791 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/glance-operator@sha256:5370dc4a8e776923eec00bb50cbdb2e390e9dde50be26bdc04a216bd2d6b5027,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-dwzgd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-operator-controller-manager-5697bb5779-rzq2w_openstack-operators(430711b5-aa60-4462-a730-242ecb914d6c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 08 21:39:21 crc kubenswrapper[4791]: I1208 21:39:21.609611 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fsnbxn" Dec 08 21:39:21 crc kubenswrapper[4791]: I1208 21:39:21.671643 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e6338e82-465c-4bbd-862a-5835f329caad-metrics-certs\") pod \"openstack-operator-controller-manager-54c84cffdd-2cvxn\" (UID: \"e6338e82-465c-4bbd-862a-5835f329caad\") " pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-2cvxn" Dec 08 21:39:21 crc kubenswrapper[4791]: I1208 21:39:21.672070 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e6338e82-465c-4bbd-862a-5835f329caad-webhook-certs\") pod \"openstack-operator-controller-manager-54c84cffdd-2cvxn\" (UID: \"e6338e82-465c-4bbd-862a-5835f329caad\") " pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-2cvxn" Dec 08 21:39:21 crc kubenswrapper[4791]: E1208 21:39:21.672227 4791 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 08 21:39:21 crc kubenswrapper[4791]: E1208 21:39:21.672286 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e6338e82-465c-4bbd-862a-5835f329caad-webhook-certs podName:e6338e82-465c-4bbd-862a-5835f329caad nodeName:}" failed. No retries permitted until 2025-12-08 21:39:37.672271608 +0000 UTC m=+1254.371029953 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/e6338e82-465c-4bbd-862a-5835f329caad-webhook-certs") pod "openstack-operator-controller-manager-54c84cffdd-2cvxn" (UID: "e6338e82-465c-4bbd-862a-5835f329caad") : secret "webhook-server-cert" not found Dec 08 21:39:21 crc kubenswrapper[4791]: I1208 21:39:21.688705 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e6338e82-465c-4bbd-862a-5835f329caad-metrics-certs\") pod \"openstack-operator-controller-manager-54c84cffdd-2cvxn\" (UID: \"e6338e82-465c-4bbd-862a-5835f329caad\") " pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-2cvxn" Dec 08 21:39:31 crc kubenswrapper[4791]: E1208 21:39:31.190247 4791 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557" Dec 08 21:39:31 crc kubenswrapper[4791]: E1208 21:39:31.192059 4791 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-g5xfn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-5fdfd5b6b5-hskzn_openstack-operators(ac0b7209-48f2-4080-bd26-86462503772b): 
ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 08 21:39:32 crc kubenswrapper[4791]: E1208 21:39:32.928997 4791 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/watcher-operator@sha256:6b3e0302608a2e70f9b5ae9167f6fbf59264f226d9db99d48f70466ab2f216b8" Dec 08 21:39:32 crc kubenswrapper[4791]: E1208 21:39:32.929423 4791 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:6b3e0302608a2e70f9b5ae9167f6fbf59264f226d9db99d48f70466ab2f216b8,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-x4tpb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-667bd8d554-vszqd_openstack-operators(6b775709-57bb-4fa2-9eb9-4785356c119c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 08 21:39:33 crc kubenswrapper[4791]: E1208 21:39:33.725331 4791 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.83:5001/openstack-k8s-operators/telemetry-operator:00aa5f531d8a387f8456969f47533ccbe3dc4576" Dec 08 21:39:33 crc kubenswrapper[4791]: E1208 21:39:33.725760 4791 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" 
image="38.102.83.83:5001/openstack-k8s-operators/telemetry-operator:00aa5f531d8a387f8456969f47533ccbe3dc4576" Dec 08 21:39:33 crc kubenswrapper[4791]: E1208 21:39:33.728868 4791 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:38.102.83.83:5001/openstack-k8s-operators/telemetry-operator:00aa5f531d8a387f8456969f47533ccbe3dc4576,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-cwqd8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 08 21:39:34 crc kubenswrapper[4791]: E1208 21:39:34.096971 4791 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/mariadb-operator@sha256:424da951f13f1fbe9083215dc9f5088f90676dd813f01fdf3c1a8639b61cbaad" Dec 08 21:39:34 crc kubenswrapper[4791]: E1208 21:39:34.097478 4791 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/mariadb-operator@sha256:424da951f13f1fbe9083215dc9f5088f90676dd813f01fdf3c1a8639b61cbaad,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-tvsn5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-79c8c4686c-nlvf4_openstack-operators(4d6f8fd6-c9d1-483e-8cc6-29d2d53f1235): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 08 21:39:34 crc kubenswrapper[4791]: E1208 21:39:34.846315 4791 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94" Dec 08 21:39:34 crc kubenswrapper[4791]: E1208 21:39:34.846545 4791 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-lbxp5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-v9cg7_openstack-operators(74434a32-0961-43af-b800-8de05830b266): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 08 21:39:35 crc kubenswrapper[4791]: E1208 21:39:35.381350 4791 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59" Dec 08 21:39:35 crc kubenswrapper[4791]: E1208 21:39:35.381524 4791 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-d65lr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-b6456fdb6-wx44v_openstack-operators(cc35e433-dd6b-4cdf-9776-49106dbb9f13): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 08 21:39:36 crc kubenswrapper[4791]: E1208 21:39:36.425515 4791 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670" Dec 08 21:39:36 crc kubenswrapper[4791]: E1208 21:39:36.426119 4791 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-q4t9b,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-6vmnd_openstack-operators(8714efaf-0a6b-46ba-aadb-2fef8f7f1a32): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 08 21:39:37 crc kubenswrapper[4791]: I1208 21:39:37.702099 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e6338e82-465c-4bbd-862a-5835f329caad-webhook-certs\") pod \"openstack-operator-controller-manager-54c84cffdd-2cvxn\" (UID: \"e6338e82-465c-4bbd-862a-5835f329caad\") " pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-2cvxn" Dec 08 21:39:37 crc kubenswrapper[4791]: I1208 21:39:37.709428 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e6338e82-465c-4bbd-862a-5835f329caad-webhook-certs\") pod \"openstack-operator-controller-manager-54c84cffdd-2cvxn\" (UID: \"e6338e82-465c-4bbd-862a-5835f329caad\") " pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-2cvxn" Dec 08 21:39:37 crc kubenswrapper[4791]: I1208 21:39:37.747355 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-2cvxn" Dec 08 21:39:38 crc kubenswrapper[4791]: I1208 21:39:38.920692 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-78d48bff9d-6fqwg"] Dec 08 21:39:38 crc kubenswrapper[4791]: I1208 21:39:38.936526 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fsnbxn"] Dec 08 21:39:39 crc kubenswrapper[4791]: W1208 21:39:39.058749 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb6c24020_d177_4816_ac96_7f97f8f243a1.slice/crio-ac408e103ee7c847894de35486a2361fb9987b50039238dac9f5c6856820d948 WatchSource:0}: Error finding container ac408e103ee7c847894de35486a2361fb9987b50039238dac9f5c6856820d948: Status 404 returned error can't find the container with id ac408e103ee7c847894de35486a2361fb9987b50039238dac9f5c6856820d948 Dec 08 21:39:39 crc kubenswrapper[4791]: W1208 21:39:39.060893 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0ab7fad3_1b4c_4f78_99a7_4ace7aa64b46.slice/crio-94ff051bfd5cc8a8f559f7003c8405e4b175a85e2047ccdf8e3765d912352a03 WatchSource:0}: Error finding container 94ff051bfd5cc8a8f559f7003c8405e4b175a85e2047ccdf8e3765d912352a03: Status 404 returned error can't find the container with id 94ff051bfd5cc8a8f559f7003c8405e4b175a85e2047ccdf8e3765d912352a03 Dec 08 21:39:39 crc kubenswrapper[4791]: I1208 21:39:39.385560 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-54c84cffdd-2cvxn"] Dec 08 21:39:39 crc kubenswrapper[4791]: I1208 21:39:39.393882 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-4mwvh" event={"ID":"253be35d-aa0c-417b-8dc8-7ef23f63ce45","Type":"ContainerStarted","Data":"109022cb2bbf7209d42bc18cfce71f65313f59277c362f374b9ec0106393a1eb"} Dec 08 21:39:39 crc kubenswrapper[4791]: I1208 21:39:39.396530 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fsnbxn" event={"ID":"0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46","Type":"ContainerStarted","Data":"94ff051bfd5cc8a8f559f7003c8405e4b175a85e2047ccdf8e3765d912352a03"} Dec 08 21:39:39 crc kubenswrapper[4791]: I1208 21:39:39.398236 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-6fqwg" event={"ID":"b6c24020-d177-4816-ac96-7f97f8f243a1","Type":"ContainerStarted","Data":"ac408e103ee7c847894de35486a2361fb9987b50039238dac9f5c6856820d948"} Dec 08 21:39:39 crc kubenswrapper[4791]: I1208 21:39:39.401417 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-zn5rk" event={"ID":"45ebd174-c21e-4fb5-ae01-cf6b3d5e7079","Type":"ContainerStarted","Data":"b30ac59ebe0831ae47bdf8a68bfa856730ecb4a251431ff6349bfcce3a4b92a7"} Dec 08 21:39:39 crc kubenswrapper[4791]: W1208 21:39:39.436012 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode6338e82_465c_4bbd_862a_5835f329caad.slice/crio-9079a2dc3f2eae24647d2009b0cca1ade919f1904f5ed9673b291abc5f2dc540 WatchSource:0}: Error finding container 
9079a2dc3f2eae24647d2009b0cca1ade919f1904f5ed9673b291abc5f2dc540: Status 404 returned error can't find the container with id 9079a2dc3f2eae24647d2009b0cca1ade919f1904f5ed9673b291abc5f2dc540 Dec 08 21:39:40 crc kubenswrapper[4791]: I1208 21:39:40.415815 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-fsm5r" event={"ID":"06e9548e-f7f7-4d48-a10a-06de61005b07","Type":"ContainerStarted","Data":"12af97a96329f9918b4334d1760e0111b0473d00a7416828deca6824e420175c"} Dec 08 21:39:40 crc kubenswrapper[4791]: I1208 21:39:40.417293 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-967d97867-s5pvq" event={"ID":"23d7f321-494b-46fa-890a-6cb7f47fdb49","Type":"ContainerStarted","Data":"2a71f4517ff2b6f2023c3e8fba6f9103d5eee8d90c8cf06147f8f537d859de1b"} Dec 08 21:39:40 crc kubenswrapper[4791]: I1208 21:39:40.420190 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-d2fdf" event={"ID":"a26b22b6-0795-4357-a1ff-9cbdd3b10f45","Type":"ContainerStarted","Data":"cd1f171d8c66353faee0cd9c5bcfab4c85d13fd2467e84d576f75e70e10d56d8"} Dec 08 21:39:40 crc kubenswrapper[4791]: I1208 21:39:40.421789 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-spmqj" event={"ID":"195e298b-eaa8-4d82-a246-bf28d442d9f9","Type":"ContainerStarted","Data":"f40e511561a156cb92b4e2eb6a20b51ad499a3e0f331d86c394be7d884dd0b74"} Dec 08 21:39:40 crc kubenswrapper[4791]: I1208 21:39:40.423488 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-9dcqh" event={"ID":"c3e8c89c-91da-44c4-95ec-20b5d543eca1","Type":"ContainerStarted","Data":"0f7c28eef4ae4c86a9eee1358508838ba0fb9d52cdb709f321f4d87dadd20fde"} Dec 08 21:39:40 crc kubenswrapper[4791]: I1208 21:39:40.424568 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-2cvxn" event={"ID":"e6338e82-465c-4bbd-862a-5835f329caad","Type":"ContainerStarted","Data":"9079a2dc3f2eae24647d2009b0cca1ade919f1904f5ed9673b291abc5f2dc540"} Dec 08 21:39:40 crc kubenswrapper[4791]: I1208 21:39:40.426167 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-258f4" event={"ID":"e652bc09-301e-4200-a0be-ec79798d93b7","Type":"ContainerStarted","Data":"a76ba6c6a62d22814caa15472f11d28175a1c96c244b5051069519cab02ca0bc"} Dec 08 21:39:40 crc kubenswrapper[4791]: I1208 21:39:40.429234 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-ddnts" event={"ID":"301abb1d-1139-4636-805b-c6458568fe7e","Type":"ContainerStarted","Data":"3b8a1d6c48a317aaa8cdfc946ca6686a20fd9e6ed7e79526089a27d7bd31b7cb"} Dec 08 21:39:46 crc kubenswrapper[4791]: E1208 21:39:46.624290 4791 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 08 21:39:46 crc kubenswrapper[4791]: E1208 21:39:46.625115 4791 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ 
--logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jwtrt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-7765d96ddf-5tzxv_openstack-operators(55e85e76-95f6-46ce-906a-26ce559775bc): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 08 21:39:46 crc kubenswrapper[4791]: E1208 21:39:46.626451 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = reading blob sha256:7d6ca59745ac48971cbc2d72b53fe413144fa5c0c21f2ef1d7aaf1291851e501: Get \\\"https://quay.io/v2/openstack-k8s-operators/keystone-operator/blobs/sha256:7d6ca59745ac48971cbc2d72b53fe413144fa5c0c21f2ef1d7aaf1291851e501\\\": context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"]" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-5tzxv" podUID="55e85e76-95f6-46ce-906a-26ce559775bc" Dec 08 21:39:46 crc kubenswrapper[4791]: E1208 21:39:46.932950 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-nlvf4" podUID="4d6f8fd6-c9d1-483e-8cc6-29d2d53f1235" Dec 08 21:39:46 crc kubenswrapper[4791]: E1208 21:39:46.933228 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-wx44v" podUID="cc35e433-dd6b-4cdf-9776-49106dbb9f13" Dec 08 21:39:47 crc kubenswrapper[4791]: I1208 21:39:47.489618 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-9dcqh" event={"ID":"c3e8c89c-91da-44c4-95ec-20b5d543eca1","Type":"ContainerStarted","Data":"392ab11ab6a77f9b9b895f92f7bf7f03d8b4215bb45ac377e66c76a8a336b500"} Dec 08 21:39:47 crc kubenswrapper[4791]: I1208 21:39:47.490482 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-9dcqh" Dec 08 21:39:47 
crc kubenswrapper[4791]: I1208 21:39:47.493066 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-9dcqh" Dec 08 21:39:47 crc kubenswrapper[4791]: I1208 21:39:47.494221 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-wx44v" event={"ID":"cc35e433-dd6b-4cdf-9776-49106dbb9f13","Type":"ContainerStarted","Data":"9c2dc1687c2b4d949b1d2a9b7a3eec7d1b4ae363a23d913f4399c758f656c9cf"} Dec 08 21:39:47 crc kubenswrapper[4791]: E1208 21:39:47.495822 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-wx44v" podUID="cc35e433-dd6b-4cdf-9776-49106dbb9f13" Dec 08 21:39:47 crc kubenswrapper[4791]: I1208 21:39:47.496152 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-2cvxn" event={"ID":"e6338e82-465c-4bbd-862a-5835f329caad","Type":"ContainerStarted","Data":"538cae33335ebf15ceb830f6fcbeea3af8d179d76840ff085eeac98bcf524188"} Dec 08 21:39:47 crc kubenswrapper[4791]: I1208 21:39:47.496899 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-2cvxn" Dec 08 21:39:47 crc kubenswrapper[4791]: I1208 21:39:47.498896 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-4mwvh" event={"ID":"253be35d-aa0c-417b-8dc8-7ef23f63ce45","Type":"ContainerStarted","Data":"29d256d631857a935e35f4533743ca347e4ab99d8f840a4809786bc394df922b"} Dec 08 21:39:47 crc kubenswrapper[4791]: I1208 21:39:47.499098 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-4mwvh" Dec 08 21:39:47 crc kubenswrapper[4791]: I1208 21:39:47.501422 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-4mwvh" Dec 08 21:39:47 crc kubenswrapper[4791]: I1208 21:39:47.502742 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-nlvf4" event={"ID":"4d6f8fd6-c9d1-483e-8cc6-29d2d53f1235","Type":"ContainerStarted","Data":"7e7687766348ec0779c98d1bf308eca2013a4d04489955064b24176e612d60eb"} Dec 08 21:39:47 crc kubenswrapper[4791]: I1208 21:39:47.506629 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-zn5rk" event={"ID":"45ebd174-c21e-4fb5-ae01-cf6b3d5e7079","Type":"ContainerStarted","Data":"18c2e954a6fc22d2d226fdd748512b65fc3ae314a75bb6847a77c9f2dcf1bb02"} Dec 08 21:39:47 crc kubenswrapper[4791]: I1208 21:39:47.507238 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-zn5rk" Dec 08 21:39:47 crc kubenswrapper[4791]: I1208 21:39:47.509402 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-zn5rk" Dec 08 21:39:47 crc kubenswrapper[4791]: I1208 21:39:47.519335 4791 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-d2fdf" event={"ID":"a26b22b6-0795-4357-a1ff-9cbdd3b10f45","Type":"ContainerStarted","Data":"60b116c04b55ef2e2315ebf99276bf84bfc046876e8d483526f5ca37a3fdecc3"} Dec 08 21:39:47 crc kubenswrapper[4791]: I1208 21:39:47.519505 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-d2fdf" Dec 08 21:39:47 crc kubenswrapper[4791]: I1208 21:39:47.522000 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-fsp7x" event={"ID":"48279dfc-ae82-45f8-ba4e-3906c0b1cefa","Type":"ContainerStarted","Data":"89c6087983354f338f925f7e7e555c1207e966cb432c4d4fe47aff0dad8f7898"} Dec 08 21:39:47 crc kubenswrapper[4791]: I1208 21:39:47.522334 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-d2fdf" Dec 08 21:39:47 crc kubenswrapper[4791]: I1208 21:39:47.522883 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-9dcqh" podStartSLOduration=2.470510064 podStartE2EDuration="43.522859872s" podCreationTimestamp="2025-12-08 21:39:04 +0000 UTC" firstStartedPulling="2025-12-08 21:39:05.602647248 +0000 UTC m=+1222.301405593" lastFinishedPulling="2025-12-08 21:39:46.654997056 +0000 UTC m=+1263.353755401" observedRunningTime="2025-12-08 21:39:47.512402873 +0000 UTC m=+1264.211161228" watchObservedRunningTime="2025-12-08 21:39:47.522859872 +0000 UTC m=+1264.221618227" Dec 08 21:39:47 crc kubenswrapper[4791]: I1208 21:39:47.574334 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-2cvxn" podStartSLOduration=42.574317868 podStartE2EDuration="42.574317868s" podCreationTimestamp="2025-12-08 21:39:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:39:47.572294578 +0000 UTC m=+1264.271052933" watchObservedRunningTime="2025-12-08 21:39:47.574317868 +0000 UTC m=+1264.273076213" Dec 08 21:39:47 crc kubenswrapper[4791]: I1208 21:39:47.632657 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-4mwvh" podStartSLOduration=3.515812505 podStartE2EDuration="43.632636704s" podCreationTimestamp="2025-12-08 21:39:04 +0000 UTC" firstStartedPulling="2025-12-08 21:39:06.411132797 +0000 UTC m=+1223.109891152" lastFinishedPulling="2025-12-08 21:39:46.527957006 +0000 UTC m=+1263.226715351" observedRunningTime="2025-12-08 21:39:47.624452541 +0000 UTC m=+1264.323210886" watchObservedRunningTime="2025-12-08 21:39:47.632636704 +0000 UTC m=+1264.331395049" Dec 08 21:39:47 crc kubenswrapper[4791]: I1208 21:39:47.679352 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-zn5rk" podStartSLOduration=3.050744966 podStartE2EDuration="43.679329512s" podCreationTimestamp="2025-12-08 21:39:04 +0000 UTC" firstStartedPulling="2025-12-08 21:39:05.900208611 +0000 UTC m=+1222.598966956" lastFinishedPulling="2025-12-08 21:39:46.528793157 +0000 UTC m=+1263.227551502" observedRunningTime="2025-12-08 21:39:47.675561428 +0000 UTC m=+1264.374319803" 
watchObservedRunningTime="2025-12-08 21:39:47.679329512 +0000 UTC m=+1264.378087857" Dec 08 21:39:47 crc kubenswrapper[4791]: I1208 21:39:47.736109 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-d2fdf" podStartSLOduration=4.6211622949999995 podStartE2EDuration="43.736090499s" podCreationTimestamp="2025-12-08 21:39:04 +0000 UTC" firstStartedPulling="2025-12-08 21:39:07.417597965 +0000 UTC m=+1224.116356310" lastFinishedPulling="2025-12-08 21:39:46.532526169 +0000 UTC m=+1263.231284514" observedRunningTime="2025-12-08 21:39:47.732026748 +0000 UTC m=+1264.430785093" watchObservedRunningTime="2025-12-08 21:39:47.736090499 +0000 UTC m=+1264.434848844" Dec 08 21:39:47 crc kubenswrapper[4791]: I1208 21:39:47.754363 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-fsp7x" podStartSLOduration=11.046330576 podStartE2EDuration="42.754344072s" podCreationTimestamp="2025-12-08 21:39:05 +0000 UTC" firstStartedPulling="2025-12-08 21:39:07.511457447 +0000 UTC m=+1224.210215792" lastFinishedPulling="2025-12-08 21:39:39.219470943 +0000 UTC m=+1255.918229288" observedRunningTime="2025-12-08 21:39:47.753823469 +0000 UTC m=+1264.452581814" watchObservedRunningTime="2025-12-08 21:39:47.754344072 +0000 UTC m=+1264.453102417" Dec 08 21:39:47 crc kubenswrapper[4791]: E1208 21:39:47.839771 4791 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 08 21:39:47 crc kubenswrapper[4791]: E1208 21:39:47.839931 4791 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-6xh5l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-operator-controller-manager-6c677c69b-4d6px_openstack-operators(684d1010-fc58-4789-b8f6-ebe783ec15fe): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 08 21:39:47 crc kubenswrapper[4791]: E1208 21:39:47.841134 4791 pod_workers.go:1301] "Error syncing pod, skipping" 
err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"]" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-4d6px" podUID="684d1010-fc58-4789-b8f6-ebe783ec15fe" Dec 08 21:39:51 crc kubenswrapper[4791]: I1208 21:39:51.561027 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fsnbxn" event={"ID":"0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46","Type":"ContainerStarted","Data":"3a45995d7a9dc3aa1f6aa9fc709dd52b487655daaf8e78eafd0dc0224f46ca8d"} Dec 08 21:39:51 crc kubenswrapper[4791]: I1208 21:39:51.563207 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-nlvf4" event={"ID":"4d6f8fd6-c9d1-483e-8cc6-29d2d53f1235","Type":"ContainerStarted","Data":"57edb11b8b456285c840c03abf6194956f56bf37a909b2fd41297b5dc3f657f9"} Dec 08 21:39:51 crc kubenswrapper[4791]: I1208 21:39:51.563362 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-nlvf4" Dec 08 21:39:51 crc kubenswrapper[4791]: I1208 21:39:51.585674 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-nlvf4" podStartSLOduration=2.990818222 podStartE2EDuration="47.585655213s" podCreationTimestamp="2025-12-08 21:39:04 +0000 UTC" firstStartedPulling="2025-12-08 21:39:06.495181564 +0000 UTC m=+1223.193939909" lastFinishedPulling="2025-12-08 21:39:51.090018565 +0000 UTC m=+1267.788776900" observedRunningTime="2025-12-08 21:39:51.580995648 +0000 UTC m=+1268.279753993" watchObservedRunningTime="2025-12-08 21:39:51.585655213 +0000 UTC m=+1268.284413558" Dec 08 21:39:51 crc kubenswrapper[4791]: E1208 21:39:51.711892 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/test-operator-controller-manager-5854674fcc-v9cg7" podUID="74434a32-0961-43af-b800-8de05830b266" Dec 08 21:39:51 crc kubenswrapper[4791]: E1208 21:39:51.724821 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-6vmnd" podUID="8714efaf-0a6b-46ba-aadb-2fef8f7f1a32" Dec 08 21:39:51 crc kubenswrapper[4791]: E1208 21:39:51.730665 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-vszqd" podUID="6b775709-57bb-4fa2-9eb9-4785356c119c" Dec 08 21:39:51 crc kubenswrapper[4791]: E1208 21:39:51.767310 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-rzq2w" podUID="430711b5-aa60-4462-a730-242ecb914d6c" Dec 08 21:39:51 crc 
kubenswrapper[4791]: E1208 21:39:51.888424 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 21:39:51 crc kubenswrapper[4791]: E1208 21:39:51.957701 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hskzn" podUID="ac0b7209-48f2-4080-bd26-86462503772b" Dec 08 21:39:52 crc kubenswrapper[4791]: I1208 21:39:52.572875 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-4d6px" event={"ID":"684d1010-fc58-4789-b8f6-ebe783ec15fe","Type":"ContainerStarted","Data":"d99d0d47766942488b4526a8e176aed7daa269bbcede335edfffaa90cfa72f91"} Dec 08 21:39:52 crc kubenswrapper[4791]: I1208 21:39:52.572931 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-4d6px" event={"ID":"684d1010-fc58-4789-b8f6-ebe783ec15fe","Type":"ContainerStarted","Data":"9cb59d82412a0c60d9579997a5dcdf15f278ad680eb7fc51c3caa394afc96aae"} Dec 08 21:39:52 crc kubenswrapper[4791]: I1208 21:39:52.574611 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hskzn" event={"ID":"ac0b7209-48f2-4080-bd26-86462503772b","Type":"ContainerStarted","Data":"8c5f25fc03828e7cb45a9fec99023ec6cda80584acf209fdec923997faa019d5"} Dec 08 21:39:52 crc kubenswrapper[4791]: I1208 21:39:52.578543 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-6fqwg" event={"ID":"b6c24020-d177-4816-ac96-7f97f8f243a1","Type":"ContainerStarted","Data":"1887f9916870864e99076f4380dd6659387c216442e03ceb60d1274eb5833758"} Dec 08 21:39:52 crc kubenswrapper[4791]: I1208 21:39:52.578590 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-6fqwg" event={"ID":"b6c24020-d177-4816-ac96-7f97f8f243a1","Type":"ContainerStarted","Data":"203567557cb84eab67fbe85224422daa29665476ce5d88b6d0911802d9e6846f"} Dec 08 21:39:52 crc kubenswrapper[4791]: I1208 21:39:52.580461 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-258f4" event={"ID":"e652bc09-301e-4200-a0be-ec79798d93b7","Type":"ContainerStarted","Data":"ca88becf803a266078a07678133a7d66b89e10af9b05b6720e33d7dd5574190d"} Dec 08 21:39:52 crc kubenswrapper[4791]: I1208 21:39:52.580677 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-78f8948974-258f4" Dec 08 21:39:52 crc kubenswrapper[4791]: I1208 21:39:52.582110 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-rzq2w" event={"ID":"430711b5-aa60-4462-a730-242ecb914d6c","Type":"ContainerStarted","Data":"97c337427041965123295058bb2477181e1d2a135add88387c0994e35ec016dd"} Dec 08 21:39:52 crc kubenswrapper[4791]: I1208 21:39:52.582823 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/placement-operator-controller-manager-78f8948974-258f4" Dec 08 21:39:52 crc kubenswrapper[4791]: I1208 21:39:52.583930 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-6vmnd" event={"ID":"8714efaf-0a6b-46ba-aadb-2fef8f7f1a32","Type":"ContainerStarted","Data":"466ac97e64f58de7ffe7e77e18f5b910da5127a674a0da39cf95d466498aee76"} Dec 08 21:39:52 crc kubenswrapper[4791]: I1208 21:39:52.585857 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-ddnts" event={"ID":"301abb1d-1139-4636-805b-c6458568fe7e","Type":"ContainerStarted","Data":"d3a47b2b678b1efb8bbe7a83ab146510e41e5faa88e38efc4619bbc1ed091afb"} Dec 08 21:39:52 crc kubenswrapper[4791]: I1208 21:39:52.587574 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-vszqd" event={"ID":"6b775709-57bb-4fa2-9eb9-4785356c119c","Type":"ContainerStarted","Data":"dd991664e06ede8278960456de8cf2584410bdf01d14279557c0d3faed041f98"} Dec 08 21:39:52 crc kubenswrapper[4791]: I1208 21:39:52.589288 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-spmqj" event={"ID":"195e298b-eaa8-4d82-a246-bf28d442d9f9","Type":"ContainerStarted","Data":"f35dcfd6a6d20e10fa00ac9677ebf15e550baae779c2d9d30a1efc8bc00a7b22"} Dec 08 21:39:52 crc kubenswrapper[4791]: I1208 21:39:52.591073 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fsnbxn" event={"ID":"0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46","Type":"ContainerStarted","Data":"9118410def9ecd5e1a0e5943cfb398c6385272d978d85c919175232a5e81dae6"} Dec 08 21:39:52 crc kubenswrapper[4791]: I1208 21:39:52.592485 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-v9cg7" event={"ID":"74434a32-0961-43af-b800-8de05830b266","Type":"ContainerStarted","Data":"fb27beeb8c50adcea7b110c5394ec9190e03a9c56823c2e83a728291cdb4bc55"} Dec 08 21:39:52 crc kubenswrapper[4791]: I1208 21:39:52.593801 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" event={"ID":"bcd8d669-4a40-401d-af99-651b840fb48b","Type":"ContainerStarted","Data":"753af94093bde00086762148847c73a0b492bc53981177c4e1fbe1c8576c6de6"} Dec 08 21:39:52 crc kubenswrapper[4791]: I1208 21:39:52.597816 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-fsm5r" event={"ID":"06e9548e-f7f7-4d48-a10a-06de61005b07","Type":"ContainerStarted","Data":"f50d83d5b48c0bcf12300a191e3b8dd7b99597938eb239e511f36cd4d0761c4b"} Dec 08 21:39:52 crc kubenswrapper[4791]: I1208 21:39:52.597967 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-fsm5r" Dec 08 21:39:52 crc kubenswrapper[4791]: I1208 21:39:52.600479 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-fsm5r" Dec 08 21:39:52 crc kubenswrapper[4791]: I1208 21:39:52.600508 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-967d97867-s5pvq" 
event={"ID":"23d7f321-494b-46fa-890a-6cb7f47fdb49","Type":"ContainerStarted","Data":"e461355924870e93a0ee3c72462ce5205688e1015876166212350a08bd280c08"} Dec 08 21:39:52 crc kubenswrapper[4791]: I1208 21:39:52.691070 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-fsm5r" podStartSLOduration=3.915295858 podStartE2EDuration="48.69105281s" podCreationTimestamp="2025-12-08 21:39:04 +0000 UTC" firstStartedPulling="2025-12-08 21:39:06.326190738 +0000 UTC m=+1223.024949083" lastFinishedPulling="2025-12-08 21:39:51.10194768 +0000 UTC m=+1267.800706035" observedRunningTime="2025-12-08 21:39:52.685169994 +0000 UTC m=+1269.383928339" watchObservedRunningTime="2025-12-08 21:39:52.69105281 +0000 UTC m=+1269.389811145" Dec 08 21:39:52 crc kubenswrapper[4791]: I1208 21:39:52.704262 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-78f8948974-258f4" podStartSLOduration=4.999216993 podStartE2EDuration="48.704243447s" podCreationTimestamp="2025-12-08 21:39:04 +0000 UTC" firstStartedPulling="2025-12-08 21:39:07.377900015 +0000 UTC m=+1224.076658360" lastFinishedPulling="2025-12-08 21:39:51.082926469 +0000 UTC m=+1267.781684814" observedRunningTime="2025-12-08 21:39:52.700875434 +0000 UTC m=+1269.399633779" watchObservedRunningTime="2025-12-08 21:39:52.704243447 +0000 UTC m=+1269.403001792" Dec 08 21:39:52 crc kubenswrapper[4791]: I1208 21:39:52.807596 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-967d97867-s5pvq" podStartSLOduration=5.126904205 podStartE2EDuration="48.807572789s" podCreationTimestamp="2025-12-08 21:39:04 +0000 UTC" firstStartedPulling="2025-12-08 21:39:07.413250727 +0000 UTC m=+1224.112009072" lastFinishedPulling="2025-12-08 21:39:51.093919321 +0000 UTC m=+1267.792677656" observedRunningTime="2025-12-08 21:39:52.802946265 +0000 UTC m=+1269.501704620" watchObservedRunningTime="2025-12-08 21:39:52.807572789 +0000 UTC m=+1269.506331134" Dec 08 21:39:53 crc kubenswrapper[4791]: I1208 21:39:53.613640 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fsnbxn" Dec 08 21:39:53 crc kubenswrapper[4791]: I1208 21:39:53.614002 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-ddnts" Dec 08 21:39:53 crc kubenswrapper[4791]: I1208 21:39:53.614062 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-967d97867-s5pvq" Dec 08 21:39:53 crc kubenswrapper[4791]: I1208 21:39:53.614079 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-4d6px" Dec 08 21:39:53 crc kubenswrapper[4791]: I1208 21:39:53.614128 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-6fqwg" Dec 08 21:39:53 crc kubenswrapper[4791]: I1208 21:39:53.614491 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-998648c74-spmqj" Dec 08 21:39:53 crc kubenswrapper[4791]: I1208 21:39:53.615244 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-ddnts" Dec 08 21:39:53 crc kubenswrapper[4791]: I1208 21:39:53.615650 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-967d97867-s5pvq" Dec 08 21:39:53 crc kubenswrapper[4791]: I1208 21:39:53.616327 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-998648c74-spmqj" Dec 08 21:39:53 crc kubenswrapper[4791]: I1208 21:39:53.667792 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fsnbxn" podStartSLOduration=37.678332728 podStartE2EDuration="49.667769517s" podCreationTimestamp="2025-12-08 21:39:04 +0000 UTC" firstStartedPulling="2025-12-08 21:39:39.068968012 +0000 UTC m=+1255.767726367" lastFinishedPulling="2025-12-08 21:39:51.058404811 +0000 UTC m=+1267.757163156" observedRunningTime="2025-12-08 21:39:53.641058495 +0000 UTC m=+1270.339816840" watchObservedRunningTime="2025-12-08 21:39:53.667769517 +0000 UTC m=+1270.366527872" Dec 08 21:39:53 crc kubenswrapper[4791]: I1208 21:39:53.675460 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-ddnts" podStartSLOduration=5.054908669 podStartE2EDuration="49.675438027s" podCreationTimestamp="2025-12-08 21:39:04 +0000 UTC" firstStartedPulling="2025-12-08 21:39:06.492404585 +0000 UTC m=+1223.191162930" lastFinishedPulling="2025-12-08 21:39:51.112933943 +0000 UTC m=+1267.811692288" observedRunningTime="2025-12-08 21:39:53.664605758 +0000 UTC m=+1270.363364113" watchObservedRunningTime="2025-12-08 21:39:53.675438027 +0000 UTC m=+1270.374196372" Dec 08 21:39:53 crc kubenswrapper[4791]: I1208 21:39:53.716782 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-998648c74-spmqj" podStartSLOduration=5.988028201 podStartE2EDuration="49.716764582s" podCreationTimestamp="2025-12-08 21:39:04 +0000 UTC" firstStartedPulling="2025-12-08 21:39:07.38131592 +0000 UTC m=+1224.080074265" lastFinishedPulling="2025-12-08 21:39:51.110052301 +0000 UTC m=+1267.808810646" observedRunningTime="2025-12-08 21:39:53.716610008 +0000 UTC m=+1270.415368343" watchObservedRunningTime="2025-12-08 21:39:53.716764582 +0000 UTC m=+1270.415522927" Dec 08 21:39:53 crc kubenswrapper[4791]: I1208 21:39:53.742695 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-4d6px" podStartSLOduration=4.483893061 podStartE2EDuration="49.742676204s" podCreationTimestamp="2025-12-08 21:39:04 +0000 UTC" firstStartedPulling="2025-12-08 21:39:05.840121712 +0000 UTC m=+1222.538880057" lastFinishedPulling="2025-12-08 21:39:51.098904855 +0000 UTC m=+1267.797663200" observedRunningTime="2025-12-08 21:39:53.73526546 +0000 UTC m=+1270.434023805" watchObservedRunningTime="2025-12-08 21:39:53.742676204 +0000 UTC m=+1270.441434549" Dec 08 21:39:53 crc kubenswrapper[4791]: I1208 21:39:53.791808 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-6fqwg" podStartSLOduration=37.800276871 podStartE2EDuration="49.791789212s" podCreationTimestamp="2025-12-08 21:39:04 +0000 UTC" firstStartedPulling="2025-12-08 21:39:39.066527871 +0000 UTC 
m=+1255.765286226" lastFinishedPulling="2025-12-08 21:39:51.058040222 +0000 UTC m=+1267.756798567" observedRunningTime="2025-12-08 21:39:53.788420728 +0000 UTC m=+1270.487179063" watchObservedRunningTime="2025-12-08 21:39:53.791789212 +0000 UTC m=+1270.490547557" Dec 08 21:39:57 crc kubenswrapper[4791]: I1208 21:39:57.645142 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-5tzxv" event={"ID":"55e85e76-95f6-46ce-906a-26ce559775bc","Type":"ContainerStarted","Data":"32ce56a9f94852703ea0b7bf240f3898a82344678c924650997cf7ee46d2a0c2"} Dec 08 21:39:57 crc kubenswrapper[4791]: I1208 21:39:57.645660 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-5tzxv" event={"ID":"55e85e76-95f6-46ce-906a-26ce559775bc","Type":"ContainerStarted","Data":"92cf0c467d04372c8e330e3cc10db0b489c791634df7ad1b022a6bf4c3096767"} Dec 08 21:39:57 crc kubenswrapper[4791]: I1208 21:39:57.645916 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-5tzxv" Dec 08 21:39:57 crc kubenswrapper[4791]: I1208 21:39:57.647090 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-6vmnd" event={"ID":"8714efaf-0a6b-46ba-aadb-2fef8f7f1a32","Type":"ContainerStarted","Data":"86a7a35b89145373963a79c647ce23aa21bd36e4cade492ccc90f21e6e48b346"} Dec 08 21:39:57 crc kubenswrapper[4791]: I1208 21:39:57.648795 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hskzn" event={"ID":"ac0b7209-48f2-4080-bd26-86462503772b","Type":"ContainerStarted","Data":"07ac2fc7186f75f814775d2e3a17d5a1e9b221a7d5bd78023a9ad5ebbaa62fad"} Dec 08 21:39:57 crc kubenswrapper[4791]: I1208 21:39:57.648910 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hskzn" Dec 08 21:39:57 crc kubenswrapper[4791]: I1208 21:39:57.650450 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-rzq2w" event={"ID":"430711b5-aa60-4462-a730-242ecb914d6c","Type":"ContainerStarted","Data":"9f4b13324154ae9b8ce7c66f6e8d51e38ab1f50ad8814d9ea5bbe43fd5fd6a69"} Dec 08 21:39:57 crc kubenswrapper[4791]: I1208 21:39:57.650669 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-rzq2w" Dec 08 21:39:57 crc kubenswrapper[4791]: I1208 21:39:57.651805 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" event={"ID":"bcd8d669-4a40-401d-af99-651b840fb48b","Type":"ContainerStarted","Data":"fa46debf45bbc591d3f3d2ff279b0f4ae741603ae5627a9244c1052ab274ef8a"} Dec 08 21:39:57 crc kubenswrapper[4791]: I1208 21:39:57.652049 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 21:39:57 crc kubenswrapper[4791]: I1208 21:39:57.653312 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-vszqd" event={"ID":"6b775709-57bb-4fa2-9eb9-4785356c119c","Type":"ContainerStarted","Data":"612df807555d645c56ec69f44794ffee12e6c78fe19000969d31898c7279ff90"} 
Dec 08 21:39:57 crc kubenswrapper[4791]: I1208 21:39:57.653461 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-vszqd" Dec 08 21:39:57 crc kubenswrapper[4791]: I1208 21:39:57.655052 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-v9cg7" event={"ID":"74434a32-0961-43af-b800-8de05830b266","Type":"ContainerStarted","Data":"0a53153aaa25c34620be5b5837d17ec0e945e1d8dcfe8919093af344aa187b30"} Dec 08 21:39:57 crc kubenswrapper[4791]: I1208 21:39:57.655754 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5854674fcc-v9cg7" Dec 08 21:39:57 crc kubenswrapper[4791]: I1208 21:39:57.689910 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-5tzxv" podStartSLOduration=3.504803575 podStartE2EDuration="53.689893529s" podCreationTimestamp="2025-12-08 21:39:04 +0000 UTC" firstStartedPulling="2025-12-08 21:39:06.492912547 +0000 UTC m=+1223.191670892" lastFinishedPulling="2025-12-08 21:39:56.678002501 +0000 UTC m=+1273.376760846" observedRunningTime="2025-12-08 21:39:57.686805423 +0000 UTC m=+1274.385563768" watchObservedRunningTime="2025-12-08 21:39:57.689893529 +0000 UTC m=+1274.388651874" Dec 08 21:39:57 crc kubenswrapper[4791]: I1208 21:39:57.704719 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-vszqd" podStartSLOduration=4.404661142 podStartE2EDuration="53.704684226s" podCreationTimestamp="2025-12-08 21:39:04 +0000 UTC" firstStartedPulling="2025-12-08 21:39:07.377902985 +0000 UTC m=+1224.076661330" lastFinishedPulling="2025-12-08 21:39:56.677926069 +0000 UTC m=+1273.376684414" observedRunningTime="2025-12-08 21:39:57.70402223 +0000 UTC m=+1274.402780565" watchObservedRunningTime="2025-12-08 21:39:57.704684226 +0000 UTC m=+1274.403442571" Dec 08 21:39:57 crc kubenswrapper[4791]: I1208 21:39:57.728840 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hskzn" podStartSLOduration=4.492858109 podStartE2EDuration="53.728822084s" podCreationTimestamp="2025-12-08 21:39:04 +0000 UTC" firstStartedPulling="2025-12-08 21:39:07.446997929 +0000 UTC m=+1224.145756274" lastFinishedPulling="2025-12-08 21:39:56.682961904 +0000 UTC m=+1273.381720249" observedRunningTime="2025-12-08 21:39:57.721103743 +0000 UTC m=+1274.419862088" watchObservedRunningTime="2025-12-08 21:39:57.728822084 +0000 UTC m=+1274.427580429" Dec 08 21:39:57 crc kubenswrapper[4791]: I1208 21:39:57.742522 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-6vmnd" podStartSLOduration=4.516224169 podStartE2EDuration="53.742506794s" podCreationTimestamp="2025-12-08 21:39:04 +0000 UTC" firstStartedPulling="2025-12-08 21:39:07.452200388 +0000 UTC m=+1224.150958733" lastFinishedPulling="2025-12-08 21:39:56.678483013 +0000 UTC m=+1273.377241358" observedRunningTime="2025-12-08 21:39:57.73912374 +0000 UTC m=+1274.437882085" watchObservedRunningTime="2025-12-08 21:39:57.742506794 +0000 UTC m=+1274.441265129" Dec 08 21:39:57 crc kubenswrapper[4791]: I1208 21:39:57.759380 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-2cvxn" Dec 08 21:39:57 crc kubenswrapper[4791]: I1208 21:39:57.772318 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podStartSLOduration=4.514926112 podStartE2EDuration="53.772298812s" podCreationTimestamp="2025-12-08 21:39:04 +0000 UTC" firstStartedPulling="2025-12-08 21:39:07.421442971 +0000 UTC m=+1224.120201316" lastFinishedPulling="2025-12-08 21:39:56.678815671 +0000 UTC m=+1273.377574016" observedRunningTime="2025-12-08 21:39:57.766830927 +0000 UTC m=+1274.465589292" watchObservedRunningTime="2025-12-08 21:39:57.772298812 +0000 UTC m=+1274.471057157" Dec 08 21:39:57 crc kubenswrapper[4791]: I1208 21:39:57.799744 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-rzq2w" podStartSLOduration=3.476703129 podStartE2EDuration="53.799725593s" podCreationTimestamp="2025-12-08 21:39:04 +0000 UTC" firstStartedPulling="2025-12-08 21:39:06.355590002 +0000 UTC m=+1223.054348347" lastFinishedPulling="2025-12-08 21:39:56.678612466 +0000 UTC m=+1273.377370811" observedRunningTime="2025-12-08 21:39:57.793103068 +0000 UTC m=+1274.491861413" watchObservedRunningTime="2025-12-08 21:39:57.799725593 +0000 UTC m=+1274.498483938" Dec 08 21:39:57 crc kubenswrapper[4791]: I1208 21:39:57.814453 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5854674fcc-v9cg7" podStartSLOduration=4.6288522180000005 podStartE2EDuration="53.814434667s" podCreationTimestamp="2025-12-08 21:39:04 +0000 UTC" firstStartedPulling="2025-12-08 21:39:07.49234136 +0000 UTC m=+1224.191099705" lastFinishedPulling="2025-12-08 21:39:56.677923809 +0000 UTC m=+1273.376682154" observedRunningTime="2025-12-08 21:39:57.813147415 +0000 UTC m=+1274.511905760" watchObservedRunningTime="2025-12-08 21:39:57.814434667 +0000 UTC m=+1274.513193022" Dec 08 21:39:58 crc kubenswrapper[4791]: I1208 21:39:58.673101 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-6vmnd" Dec 08 21:40:00 crc kubenswrapper[4791]: I1208 21:40:00.804138 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-6fqwg" Dec 08 21:40:01 crc kubenswrapper[4791]: I1208 21:40:01.615373 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fsnbxn" Dec 08 21:40:01 crc kubenswrapper[4791]: I1208 21:40:01.695387 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-wx44v" event={"ID":"cc35e433-dd6b-4cdf-9776-49106dbb9f13","Type":"ContainerStarted","Data":"657bfcc2573344f9c73cc2264aa85134875f4b827c88d9e292730ac00f443c52"} Dec 08 21:40:01 crc kubenswrapper[4791]: I1208 21:40:01.695882 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-wx44v" Dec 08 21:40:01 crc kubenswrapper[4791]: I1208 21:40:01.716268 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-wx44v" podStartSLOduration=4.141068584 podStartE2EDuration="57.716247156s" 
podCreationTimestamp="2025-12-08 21:39:04 +0000 UTC" firstStartedPulling="2025-12-08 21:39:07.496726809 +0000 UTC m=+1224.195485154" lastFinishedPulling="2025-12-08 21:40:01.071905381 +0000 UTC m=+1277.770663726" observedRunningTime="2025-12-08 21:40:01.710690618 +0000 UTC m=+1278.409448973" watchObservedRunningTime="2025-12-08 21:40:01.716247156 +0000 UTC m=+1278.415005501" Dec 08 21:40:04 crc kubenswrapper[4791]: I1208 21:40:04.717195 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-4d6px" Dec 08 21:40:04 crc kubenswrapper[4791]: I1208 21:40:04.786106 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-rzq2w" Dec 08 21:40:05 crc kubenswrapper[4791]: I1208 21:40:05.152940 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-5tzxv" Dec 08 21:40:05 crc kubenswrapper[4791]: I1208 21:40:05.223406 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-nlvf4" Dec 08 21:40:05 crc kubenswrapper[4791]: I1208 21:40:05.389775 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hskzn" Dec 08 21:40:05 crc kubenswrapper[4791]: I1208 21:40:05.476004 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-6vmnd" Dec 08 21:40:05 crc kubenswrapper[4791]: I1208 21:40:05.885672 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-vszqd" Dec 08 21:40:05 crc kubenswrapper[4791]: I1208 21:40:05.886866 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 21:40:05 crc kubenswrapper[4791]: I1208 21:40:05.903803 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5854674fcc-v9cg7" Dec 08 21:40:15 crc kubenswrapper[4791]: I1208 21:40:15.613163 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-wx44v" Dec 08 21:40:25 crc kubenswrapper[4791]: I1208 21:40:25.519569 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-tlg5b"] Dec 08 21:40:25 crc kubenswrapper[4791]: I1208 21:40:25.522590 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tlg5b" Dec 08 21:40:25 crc kubenswrapper[4791]: I1208 21:40:25.536853 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tlg5b"] Dec 08 21:40:25 crc kubenswrapper[4791]: I1208 21:40:25.681117 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mnz5r\" (UniqueName: \"kubernetes.io/projected/70d75e9a-8c9b-459e-87fe-55dea21bec5b-kube-api-access-mnz5r\") pod \"redhat-marketplace-tlg5b\" (UID: \"70d75e9a-8c9b-459e-87fe-55dea21bec5b\") " pod="openshift-marketplace/redhat-marketplace-tlg5b" Dec 08 21:40:25 crc kubenswrapper[4791]: I1208 21:40:25.681770 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70d75e9a-8c9b-459e-87fe-55dea21bec5b-utilities\") pod \"redhat-marketplace-tlg5b\" (UID: \"70d75e9a-8c9b-459e-87fe-55dea21bec5b\") " pod="openshift-marketplace/redhat-marketplace-tlg5b" Dec 08 21:40:25 crc kubenswrapper[4791]: I1208 21:40:25.681857 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70d75e9a-8c9b-459e-87fe-55dea21bec5b-catalog-content\") pod \"redhat-marketplace-tlg5b\" (UID: \"70d75e9a-8c9b-459e-87fe-55dea21bec5b\") " pod="openshift-marketplace/redhat-marketplace-tlg5b" Dec 08 21:40:25 crc kubenswrapper[4791]: I1208 21:40:25.783804 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70d75e9a-8c9b-459e-87fe-55dea21bec5b-utilities\") pod \"redhat-marketplace-tlg5b\" (UID: \"70d75e9a-8c9b-459e-87fe-55dea21bec5b\") " pod="openshift-marketplace/redhat-marketplace-tlg5b" Dec 08 21:40:25 crc kubenswrapper[4791]: I1208 21:40:25.783886 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70d75e9a-8c9b-459e-87fe-55dea21bec5b-catalog-content\") pod \"redhat-marketplace-tlg5b\" (UID: \"70d75e9a-8c9b-459e-87fe-55dea21bec5b\") " pod="openshift-marketplace/redhat-marketplace-tlg5b" Dec 08 21:40:25 crc kubenswrapper[4791]: I1208 21:40:25.784010 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mnz5r\" (UniqueName: \"kubernetes.io/projected/70d75e9a-8c9b-459e-87fe-55dea21bec5b-kube-api-access-mnz5r\") pod \"redhat-marketplace-tlg5b\" (UID: \"70d75e9a-8c9b-459e-87fe-55dea21bec5b\") " pod="openshift-marketplace/redhat-marketplace-tlg5b" Dec 08 21:40:25 crc kubenswrapper[4791]: I1208 21:40:25.784343 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70d75e9a-8c9b-459e-87fe-55dea21bec5b-utilities\") pod \"redhat-marketplace-tlg5b\" (UID: \"70d75e9a-8c9b-459e-87fe-55dea21bec5b\") " pod="openshift-marketplace/redhat-marketplace-tlg5b" Dec 08 21:40:25 crc kubenswrapper[4791]: I1208 21:40:25.784378 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70d75e9a-8c9b-459e-87fe-55dea21bec5b-catalog-content\") pod \"redhat-marketplace-tlg5b\" (UID: \"70d75e9a-8c9b-459e-87fe-55dea21bec5b\") " pod="openshift-marketplace/redhat-marketplace-tlg5b" Dec 08 21:40:25 crc kubenswrapper[4791]: I1208 21:40:25.805784 4791 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-mnz5r\" (UniqueName: \"kubernetes.io/projected/70d75e9a-8c9b-459e-87fe-55dea21bec5b-kube-api-access-mnz5r\") pod \"redhat-marketplace-tlg5b\" (UID: \"70d75e9a-8c9b-459e-87fe-55dea21bec5b\") " pod="openshift-marketplace/redhat-marketplace-tlg5b" Dec 08 21:40:25 crc kubenswrapper[4791]: I1208 21:40:25.843036 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tlg5b" Dec 08 21:40:26 crc kubenswrapper[4791]: I1208 21:40:26.305793 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tlg5b"] Dec 08 21:40:26 crc kubenswrapper[4791]: I1208 21:40:26.912276 4791 generic.go:334] "Generic (PLEG): container finished" podID="70d75e9a-8c9b-459e-87fe-55dea21bec5b" containerID="82e4bd8a0946e944d7464132cc75a16ddd9fa7e92a7b4b2ad4a29ad2c3c85308" exitCode=0 Dec 08 21:40:26 crc kubenswrapper[4791]: I1208 21:40:26.912405 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tlg5b" event={"ID":"70d75e9a-8c9b-459e-87fe-55dea21bec5b","Type":"ContainerDied","Data":"82e4bd8a0946e944d7464132cc75a16ddd9fa7e92a7b4b2ad4a29ad2c3c85308"} Dec 08 21:40:26 crc kubenswrapper[4791]: I1208 21:40:26.912801 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tlg5b" event={"ID":"70d75e9a-8c9b-459e-87fe-55dea21bec5b","Type":"ContainerStarted","Data":"2a0c77805fea1edb9c525c97c5fe7dcd8eb3881f76db0f9550435b910bf1a990"} Dec 08 21:40:27 crc kubenswrapper[4791]: I1208 21:40:27.926502 4791 generic.go:334] "Generic (PLEG): container finished" podID="70d75e9a-8c9b-459e-87fe-55dea21bec5b" containerID="221bc5c34fff5905b4caada31707c80b63d5755acf8a0bd3c06ab8a273cf6a02" exitCode=0 Dec 08 21:40:27 crc kubenswrapper[4791]: I1208 21:40:27.926565 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tlg5b" event={"ID":"70d75e9a-8c9b-459e-87fe-55dea21bec5b","Type":"ContainerDied","Data":"221bc5c34fff5905b4caada31707c80b63d5755acf8a0bd3c06ab8a273cf6a02"} Dec 08 21:40:27 crc kubenswrapper[4791]: I1208 21:40:27.929376 4791 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 08 21:40:29 crc kubenswrapper[4791]: I1208 21:40:29.945280 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tlg5b" event={"ID":"70d75e9a-8c9b-459e-87fe-55dea21bec5b","Type":"ContainerStarted","Data":"599369f493949bb7f329f3c712d4635f3a1167c3d2d8bfacaecacfbe7f700ff3"} Dec 08 21:40:29 crc kubenswrapper[4791]: I1208 21:40:29.965659 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-tlg5b" podStartSLOduration=2.937528502 podStartE2EDuration="4.965636287s" podCreationTimestamp="2025-12-08 21:40:25 +0000 UTC" firstStartedPulling="2025-12-08 21:40:26.914439506 +0000 UTC m=+1303.613197851" lastFinishedPulling="2025-12-08 21:40:28.942547291 +0000 UTC m=+1305.641305636" observedRunningTime="2025-12-08 21:40:29.961798442 +0000 UTC m=+1306.660556797" watchObservedRunningTime="2025-12-08 21:40:29.965636287 +0000 UTC m=+1306.664394632" Dec 08 21:40:35 crc kubenswrapper[4791]: I1208 21:40:35.843640 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-tlg5b" Dec 08 21:40:35 crc kubenswrapper[4791]: I1208 21:40:35.844240 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/redhat-marketplace-tlg5b" Dec 08 21:40:35 crc kubenswrapper[4791]: I1208 21:40:35.921533 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-tlg5b" Dec 08 21:40:36 crc kubenswrapper[4791]: I1208 21:40:36.090156 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-tlg5b" Dec 08 21:40:36 crc kubenswrapper[4791]: I1208 21:40:36.173187 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tlg5b"] Dec 08 21:40:37 crc kubenswrapper[4791]: I1208 21:40:37.046118 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-lpdvq"] Dec 08 21:40:37 crc kubenswrapper[4791]: I1208 21:40:37.048107 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-lpdvq" Dec 08 21:40:37 crc kubenswrapper[4791]: I1208 21:40:37.054124 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Dec 08 21:40:37 crc kubenswrapper[4791]: I1208 21:40:37.054225 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Dec 08 21:40:37 crc kubenswrapper[4791]: I1208 21:40:37.054562 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-k7lkf" Dec 08 21:40:37 crc kubenswrapper[4791]: I1208 21:40:37.055960 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Dec 08 21:40:37 crc kubenswrapper[4791]: I1208 21:40:37.068243 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-lpdvq"] Dec 08 21:40:37 crc kubenswrapper[4791]: I1208 21:40:37.116528 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-lkjjh"] Dec 08 21:40:37 crc kubenswrapper[4791]: I1208 21:40:37.119462 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-lkjjh" Dec 08 21:40:37 crc kubenswrapper[4791]: I1208 21:40:37.123870 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Dec 08 21:40:37 crc kubenswrapper[4791]: I1208 21:40:37.131847 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-lkjjh"] Dec 08 21:40:37 crc kubenswrapper[4791]: I1208 21:40:37.215782 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mjrgb\" (UniqueName: \"kubernetes.io/projected/4e83c5d1-7391-49ef-bc37-decc7363bddb-kube-api-access-mjrgb\") pod \"dnsmasq-dns-675f4bcbfc-lpdvq\" (UID: \"4e83c5d1-7391-49ef-bc37-decc7363bddb\") " pod="openstack/dnsmasq-dns-675f4bcbfc-lpdvq" Dec 08 21:40:37 crc kubenswrapper[4791]: I1208 21:40:37.215850 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e83c5d1-7391-49ef-bc37-decc7363bddb-config\") pod \"dnsmasq-dns-675f4bcbfc-lpdvq\" (UID: \"4e83c5d1-7391-49ef-bc37-decc7363bddb\") " pod="openstack/dnsmasq-dns-675f4bcbfc-lpdvq" Dec 08 21:40:37 crc kubenswrapper[4791]: I1208 21:40:37.317950 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mjrgb\" (UniqueName: \"kubernetes.io/projected/4e83c5d1-7391-49ef-bc37-decc7363bddb-kube-api-access-mjrgb\") pod \"dnsmasq-dns-675f4bcbfc-lpdvq\" (UID: \"4e83c5d1-7391-49ef-bc37-decc7363bddb\") " pod="openstack/dnsmasq-dns-675f4bcbfc-lpdvq" Dec 08 21:40:37 crc kubenswrapper[4791]: I1208 21:40:37.318000 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e83c5d1-7391-49ef-bc37-decc7363bddb-config\") pod \"dnsmasq-dns-675f4bcbfc-lpdvq\" (UID: \"4e83c5d1-7391-49ef-bc37-decc7363bddb\") " pod="openstack/dnsmasq-dns-675f4bcbfc-lpdvq" Dec 08 21:40:37 crc kubenswrapper[4791]: I1208 21:40:37.318036 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/64298554-5f77-48ff-ace7-8a85487060ba-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-lkjjh\" (UID: \"64298554-5f77-48ff-ace7-8a85487060ba\") " pod="openstack/dnsmasq-dns-78dd6ddcc-lkjjh" Dec 08 21:40:37 crc kubenswrapper[4791]: I1208 21:40:37.318061 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64298554-5f77-48ff-ace7-8a85487060ba-config\") pod \"dnsmasq-dns-78dd6ddcc-lkjjh\" (UID: \"64298554-5f77-48ff-ace7-8a85487060ba\") " pod="openstack/dnsmasq-dns-78dd6ddcc-lkjjh" Dec 08 21:40:37 crc kubenswrapper[4791]: I1208 21:40:37.318109 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bht6\" (UniqueName: \"kubernetes.io/projected/64298554-5f77-48ff-ace7-8a85487060ba-kube-api-access-9bht6\") pod \"dnsmasq-dns-78dd6ddcc-lkjjh\" (UID: \"64298554-5f77-48ff-ace7-8a85487060ba\") " pod="openstack/dnsmasq-dns-78dd6ddcc-lkjjh" Dec 08 21:40:37 crc kubenswrapper[4791]: I1208 21:40:37.319067 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e83c5d1-7391-49ef-bc37-decc7363bddb-config\") pod \"dnsmasq-dns-675f4bcbfc-lpdvq\" (UID: \"4e83c5d1-7391-49ef-bc37-decc7363bddb\") " pod="openstack/dnsmasq-dns-675f4bcbfc-lpdvq" Dec 08 
21:40:37 crc kubenswrapper[4791]: I1208 21:40:37.351532 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mjrgb\" (UniqueName: \"kubernetes.io/projected/4e83c5d1-7391-49ef-bc37-decc7363bddb-kube-api-access-mjrgb\") pod \"dnsmasq-dns-675f4bcbfc-lpdvq\" (UID: \"4e83c5d1-7391-49ef-bc37-decc7363bddb\") " pod="openstack/dnsmasq-dns-675f4bcbfc-lpdvq" Dec 08 21:40:37 crc kubenswrapper[4791]: I1208 21:40:37.378999 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-lpdvq" Dec 08 21:40:37 crc kubenswrapper[4791]: I1208 21:40:37.419698 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9bht6\" (UniqueName: \"kubernetes.io/projected/64298554-5f77-48ff-ace7-8a85487060ba-kube-api-access-9bht6\") pod \"dnsmasq-dns-78dd6ddcc-lkjjh\" (UID: \"64298554-5f77-48ff-ace7-8a85487060ba\") " pod="openstack/dnsmasq-dns-78dd6ddcc-lkjjh" Dec 08 21:40:37 crc kubenswrapper[4791]: I1208 21:40:37.419871 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/64298554-5f77-48ff-ace7-8a85487060ba-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-lkjjh\" (UID: \"64298554-5f77-48ff-ace7-8a85487060ba\") " pod="openstack/dnsmasq-dns-78dd6ddcc-lkjjh" Dec 08 21:40:37 crc kubenswrapper[4791]: I1208 21:40:37.419901 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64298554-5f77-48ff-ace7-8a85487060ba-config\") pod \"dnsmasq-dns-78dd6ddcc-lkjjh\" (UID: \"64298554-5f77-48ff-ace7-8a85487060ba\") " pod="openstack/dnsmasq-dns-78dd6ddcc-lkjjh" Dec 08 21:40:37 crc kubenswrapper[4791]: I1208 21:40:37.421249 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64298554-5f77-48ff-ace7-8a85487060ba-config\") pod \"dnsmasq-dns-78dd6ddcc-lkjjh\" (UID: \"64298554-5f77-48ff-ace7-8a85487060ba\") " pod="openstack/dnsmasq-dns-78dd6ddcc-lkjjh" Dec 08 21:40:37 crc kubenswrapper[4791]: I1208 21:40:37.424379 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/64298554-5f77-48ff-ace7-8a85487060ba-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-lkjjh\" (UID: \"64298554-5f77-48ff-ace7-8a85487060ba\") " pod="openstack/dnsmasq-dns-78dd6ddcc-lkjjh" Dec 08 21:40:37 crc kubenswrapper[4791]: I1208 21:40:37.443546 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9bht6\" (UniqueName: \"kubernetes.io/projected/64298554-5f77-48ff-ace7-8a85487060ba-kube-api-access-9bht6\") pod \"dnsmasq-dns-78dd6ddcc-lkjjh\" (UID: \"64298554-5f77-48ff-ace7-8a85487060ba\") " pod="openstack/dnsmasq-dns-78dd6ddcc-lkjjh" Dec 08 21:40:37 crc kubenswrapper[4791]: I1208 21:40:37.742096 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-lkjjh" Dec 08 21:40:37 crc kubenswrapper[4791]: I1208 21:40:37.890641 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-lpdvq"] Dec 08 21:40:38 crc kubenswrapper[4791]: I1208 21:40:38.037099 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-tlg5b" podUID="70d75e9a-8c9b-459e-87fe-55dea21bec5b" containerName="registry-server" containerID="cri-o://599369f493949bb7f329f3c712d4635f3a1167c3d2d8bfacaecacfbe7f700ff3" gracePeriod=2 Dec 08 21:40:38 crc kubenswrapper[4791]: I1208 21:40:38.038126 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-lpdvq" event={"ID":"4e83c5d1-7391-49ef-bc37-decc7363bddb","Type":"ContainerStarted","Data":"4a760fed59a98d5de29f46c9a6a39949dc047293cea3a7f44ce8ba8ee58d593e"} Dec 08 21:40:38 crc kubenswrapper[4791]: I1208 21:40:38.267398 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-lkjjh"] Dec 08 21:40:38 crc kubenswrapper[4791]: I1208 21:40:38.549248 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tlg5b" Dec 08 21:40:38 crc kubenswrapper[4791]: I1208 21:40:38.643518 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70d75e9a-8c9b-459e-87fe-55dea21bec5b-catalog-content\") pod \"70d75e9a-8c9b-459e-87fe-55dea21bec5b\" (UID: \"70d75e9a-8c9b-459e-87fe-55dea21bec5b\") " Dec 08 21:40:38 crc kubenswrapper[4791]: I1208 21:40:38.643597 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70d75e9a-8c9b-459e-87fe-55dea21bec5b-utilities\") pod \"70d75e9a-8c9b-459e-87fe-55dea21bec5b\" (UID: \"70d75e9a-8c9b-459e-87fe-55dea21bec5b\") " Dec 08 21:40:38 crc kubenswrapper[4791]: I1208 21:40:38.643665 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnz5r\" (UniqueName: \"kubernetes.io/projected/70d75e9a-8c9b-459e-87fe-55dea21bec5b-kube-api-access-mnz5r\") pod \"70d75e9a-8c9b-459e-87fe-55dea21bec5b\" (UID: \"70d75e9a-8c9b-459e-87fe-55dea21bec5b\") " Dec 08 21:40:38 crc kubenswrapper[4791]: I1208 21:40:38.644639 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/70d75e9a-8c9b-459e-87fe-55dea21bec5b-utilities" (OuterVolumeSpecName: "utilities") pod "70d75e9a-8c9b-459e-87fe-55dea21bec5b" (UID: "70d75e9a-8c9b-459e-87fe-55dea21bec5b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:40:38 crc kubenswrapper[4791]: I1208 21:40:38.651545 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70d75e9a-8c9b-459e-87fe-55dea21bec5b-kube-api-access-mnz5r" (OuterVolumeSpecName: "kube-api-access-mnz5r") pod "70d75e9a-8c9b-459e-87fe-55dea21bec5b" (UID: "70d75e9a-8c9b-459e-87fe-55dea21bec5b"). InnerVolumeSpecName "kube-api-access-mnz5r". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:40:38 crc kubenswrapper[4791]: I1208 21:40:38.672758 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/70d75e9a-8c9b-459e-87fe-55dea21bec5b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "70d75e9a-8c9b-459e-87fe-55dea21bec5b" (UID: "70d75e9a-8c9b-459e-87fe-55dea21bec5b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:40:38 crc kubenswrapper[4791]: I1208 21:40:38.746097 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnz5r\" (UniqueName: \"kubernetes.io/projected/70d75e9a-8c9b-459e-87fe-55dea21bec5b-kube-api-access-mnz5r\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:38 crc kubenswrapper[4791]: I1208 21:40:38.746137 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70d75e9a-8c9b-459e-87fe-55dea21bec5b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:38 crc kubenswrapper[4791]: I1208 21:40:38.746147 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70d75e9a-8c9b-459e-87fe-55dea21bec5b-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:39 crc kubenswrapper[4791]: I1208 21:40:39.048023 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-lkjjh" event={"ID":"64298554-5f77-48ff-ace7-8a85487060ba","Type":"ContainerStarted","Data":"0044a6ffddcb7b5651e60338c432e206d1bf4a6a5e5b1094e8e46360ac43e3e9"} Dec 08 21:40:39 crc kubenswrapper[4791]: I1208 21:40:39.052118 4791 generic.go:334] "Generic (PLEG): container finished" podID="70d75e9a-8c9b-459e-87fe-55dea21bec5b" containerID="599369f493949bb7f329f3c712d4635f3a1167c3d2d8bfacaecacfbe7f700ff3" exitCode=0 Dec 08 21:40:39 crc kubenswrapper[4791]: I1208 21:40:39.052170 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tlg5b" event={"ID":"70d75e9a-8c9b-459e-87fe-55dea21bec5b","Type":"ContainerDied","Data":"599369f493949bb7f329f3c712d4635f3a1167c3d2d8bfacaecacfbe7f700ff3"} Dec 08 21:40:39 crc kubenswrapper[4791]: I1208 21:40:39.052190 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tlg5b" Dec 08 21:40:39 crc kubenswrapper[4791]: I1208 21:40:39.052202 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tlg5b" event={"ID":"70d75e9a-8c9b-459e-87fe-55dea21bec5b","Type":"ContainerDied","Data":"2a0c77805fea1edb9c525c97c5fe7dcd8eb3881f76db0f9550435b910bf1a990"} Dec 08 21:40:39 crc kubenswrapper[4791]: I1208 21:40:39.052224 4791 scope.go:117] "RemoveContainer" containerID="599369f493949bb7f329f3c712d4635f3a1167c3d2d8bfacaecacfbe7f700ff3" Dec 08 21:40:39 crc kubenswrapper[4791]: I1208 21:40:39.079018 4791 scope.go:117] "RemoveContainer" containerID="221bc5c34fff5905b4caada31707c80b63d5755acf8a0bd3c06ab8a273cf6a02" Dec 08 21:40:39 crc kubenswrapper[4791]: I1208 21:40:39.099293 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tlg5b"] Dec 08 21:40:39 crc kubenswrapper[4791]: I1208 21:40:39.114161 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-tlg5b"] Dec 08 21:40:39 crc kubenswrapper[4791]: I1208 21:40:39.123843 4791 scope.go:117] "RemoveContainer" containerID="82e4bd8a0946e944d7464132cc75a16ddd9fa7e92a7b4b2ad4a29ad2c3c85308" Dec 08 21:40:39 crc kubenswrapper[4791]: I1208 21:40:39.165765 4791 scope.go:117] "RemoveContainer" containerID="599369f493949bb7f329f3c712d4635f3a1167c3d2d8bfacaecacfbe7f700ff3" Dec 08 21:40:39 crc kubenswrapper[4791]: E1208 21:40:39.166401 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"599369f493949bb7f329f3c712d4635f3a1167c3d2d8bfacaecacfbe7f700ff3\": container with ID starting with 599369f493949bb7f329f3c712d4635f3a1167c3d2d8bfacaecacfbe7f700ff3 not found: ID does not exist" containerID="599369f493949bb7f329f3c712d4635f3a1167c3d2d8bfacaecacfbe7f700ff3" Dec 08 21:40:39 crc kubenswrapper[4791]: I1208 21:40:39.166454 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"599369f493949bb7f329f3c712d4635f3a1167c3d2d8bfacaecacfbe7f700ff3"} err="failed to get container status \"599369f493949bb7f329f3c712d4635f3a1167c3d2d8bfacaecacfbe7f700ff3\": rpc error: code = NotFound desc = could not find container \"599369f493949bb7f329f3c712d4635f3a1167c3d2d8bfacaecacfbe7f700ff3\": container with ID starting with 599369f493949bb7f329f3c712d4635f3a1167c3d2d8bfacaecacfbe7f700ff3 not found: ID does not exist" Dec 08 21:40:39 crc kubenswrapper[4791]: I1208 21:40:39.166482 4791 scope.go:117] "RemoveContainer" containerID="221bc5c34fff5905b4caada31707c80b63d5755acf8a0bd3c06ab8a273cf6a02" Dec 08 21:40:39 crc kubenswrapper[4791]: E1208 21:40:39.167035 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"221bc5c34fff5905b4caada31707c80b63d5755acf8a0bd3c06ab8a273cf6a02\": container with ID starting with 221bc5c34fff5905b4caada31707c80b63d5755acf8a0bd3c06ab8a273cf6a02 not found: ID does not exist" containerID="221bc5c34fff5905b4caada31707c80b63d5755acf8a0bd3c06ab8a273cf6a02" Dec 08 21:40:39 crc kubenswrapper[4791]: I1208 21:40:39.167095 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"221bc5c34fff5905b4caada31707c80b63d5755acf8a0bd3c06ab8a273cf6a02"} err="failed to get container status \"221bc5c34fff5905b4caada31707c80b63d5755acf8a0bd3c06ab8a273cf6a02\": rpc error: code = NotFound desc = could not find 
container \"221bc5c34fff5905b4caada31707c80b63d5755acf8a0bd3c06ab8a273cf6a02\": container with ID starting with 221bc5c34fff5905b4caada31707c80b63d5755acf8a0bd3c06ab8a273cf6a02 not found: ID does not exist" Dec 08 21:40:39 crc kubenswrapper[4791]: I1208 21:40:39.167123 4791 scope.go:117] "RemoveContainer" containerID="82e4bd8a0946e944d7464132cc75a16ddd9fa7e92a7b4b2ad4a29ad2c3c85308" Dec 08 21:40:39 crc kubenswrapper[4791]: E1208 21:40:39.167530 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"82e4bd8a0946e944d7464132cc75a16ddd9fa7e92a7b4b2ad4a29ad2c3c85308\": container with ID starting with 82e4bd8a0946e944d7464132cc75a16ddd9fa7e92a7b4b2ad4a29ad2c3c85308 not found: ID does not exist" containerID="82e4bd8a0946e944d7464132cc75a16ddd9fa7e92a7b4b2ad4a29ad2c3c85308" Dec 08 21:40:39 crc kubenswrapper[4791]: I1208 21:40:39.167586 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"82e4bd8a0946e944d7464132cc75a16ddd9fa7e92a7b4b2ad4a29ad2c3c85308"} err="failed to get container status \"82e4bd8a0946e944d7464132cc75a16ddd9fa7e92a7b4b2ad4a29ad2c3c85308\": rpc error: code = NotFound desc = could not find container \"82e4bd8a0946e944d7464132cc75a16ddd9fa7e92a7b4b2ad4a29ad2c3c85308\": container with ID starting with 82e4bd8a0946e944d7464132cc75a16ddd9fa7e92a7b4b2ad4a29ad2c3c85308 not found: ID does not exist" Dec 08 21:40:39 crc kubenswrapper[4791]: I1208 21:40:39.644616 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="70d75e9a-8c9b-459e-87fe-55dea21bec5b" path="/var/lib/kubelet/pods/70d75e9a-8c9b-459e-87fe-55dea21bec5b/volumes" Dec 08 21:40:39 crc kubenswrapper[4791]: I1208 21:40:39.645819 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-lpdvq"] Dec 08 21:40:39 crc kubenswrapper[4791]: I1208 21:40:39.658324 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-f7kxb"] Dec 08 21:40:39 crc kubenswrapper[4791]: E1208 21:40:39.658772 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70d75e9a-8c9b-459e-87fe-55dea21bec5b" containerName="registry-server" Dec 08 21:40:39 crc kubenswrapper[4791]: I1208 21:40:39.658794 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="70d75e9a-8c9b-459e-87fe-55dea21bec5b" containerName="registry-server" Dec 08 21:40:39 crc kubenswrapper[4791]: E1208 21:40:39.658808 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70d75e9a-8c9b-459e-87fe-55dea21bec5b" containerName="extract-content" Dec 08 21:40:39 crc kubenswrapper[4791]: I1208 21:40:39.658814 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="70d75e9a-8c9b-459e-87fe-55dea21bec5b" containerName="extract-content" Dec 08 21:40:39 crc kubenswrapper[4791]: E1208 21:40:39.658827 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70d75e9a-8c9b-459e-87fe-55dea21bec5b" containerName="extract-utilities" Dec 08 21:40:39 crc kubenswrapper[4791]: I1208 21:40:39.658835 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="70d75e9a-8c9b-459e-87fe-55dea21bec5b" containerName="extract-utilities" Dec 08 21:40:39 crc kubenswrapper[4791]: I1208 21:40:39.659040 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="70d75e9a-8c9b-459e-87fe-55dea21bec5b" containerName="registry-server" Dec 08 21:40:39 crc kubenswrapper[4791]: I1208 21:40:39.661329 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-f7kxb" Dec 08 21:40:39 crc kubenswrapper[4791]: I1208 21:40:39.672838 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-f7kxb"] Dec 08 21:40:39 crc kubenswrapper[4791]: I1208 21:40:39.768231 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rlt27\" (UniqueName: \"kubernetes.io/projected/da534a4d-ccf5-4297-9a1e-5f3b2ad136f5-kube-api-access-rlt27\") pod \"dnsmasq-dns-666b6646f7-f7kxb\" (UID: \"da534a4d-ccf5-4297-9a1e-5f3b2ad136f5\") " pod="openstack/dnsmasq-dns-666b6646f7-f7kxb" Dec 08 21:40:39 crc kubenswrapper[4791]: I1208 21:40:39.768310 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/da534a4d-ccf5-4297-9a1e-5f3b2ad136f5-dns-svc\") pod \"dnsmasq-dns-666b6646f7-f7kxb\" (UID: \"da534a4d-ccf5-4297-9a1e-5f3b2ad136f5\") " pod="openstack/dnsmasq-dns-666b6646f7-f7kxb" Dec 08 21:40:39 crc kubenswrapper[4791]: I1208 21:40:39.768375 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/da534a4d-ccf5-4297-9a1e-5f3b2ad136f5-config\") pod \"dnsmasq-dns-666b6646f7-f7kxb\" (UID: \"da534a4d-ccf5-4297-9a1e-5f3b2ad136f5\") " pod="openstack/dnsmasq-dns-666b6646f7-f7kxb" Dec 08 21:40:39 crc kubenswrapper[4791]: I1208 21:40:39.873125 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rlt27\" (UniqueName: \"kubernetes.io/projected/da534a4d-ccf5-4297-9a1e-5f3b2ad136f5-kube-api-access-rlt27\") pod \"dnsmasq-dns-666b6646f7-f7kxb\" (UID: \"da534a4d-ccf5-4297-9a1e-5f3b2ad136f5\") " pod="openstack/dnsmasq-dns-666b6646f7-f7kxb" Dec 08 21:40:39 crc kubenswrapper[4791]: I1208 21:40:39.873205 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/da534a4d-ccf5-4297-9a1e-5f3b2ad136f5-dns-svc\") pod \"dnsmasq-dns-666b6646f7-f7kxb\" (UID: \"da534a4d-ccf5-4297-9a1e-5f3b2ad136f5\") " pod="openstack/dnsmasq-dns-666b6646f7-f7kxb" Dec 08 21:40:39 crc kubenswrapper[4791]: I1208 21:40:39.873242 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/da534a4d-ccf5-4297-9a1e-5f3b2ad136f5-config\") pod \"dnsmasq-dns-666b6646f7-f7kxb\" (UID: \"da534a4d-ccf5-4297-9a1e-5f3b2ad136f5\") " pod="openstack/dnsmasq-dns-666b6646f7-f7kxb" Dec 08 21:40:39 crc kubenswrapper[4791]: I1208 21:40:39.874178 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/da534a4d-ccf5-4297-9a1e-5f3b2ad136f5-dns-svc\") pod \"dnsmasq-dns-666b6646f7-f7kxb\" (UID: \"da534a4d-ccf5-4297-9a1e-5f3b2ad136f5\") " pod="openstack/dnsmasq-dns-666b6646f7-f7kxb" Dec 08 21:40:39 crc kubenswrapper[4791]: I1208 21:40:39.880169 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/da534a4d-ccf5-4297-9a1e-5f3b2ad136f5-config\") pod \"dnsmasq-dns-666b6646f7-f7kxb\" (UID: \"da534a4d-ccf5-4297-9a1e-5f3b2ad136f5\") " pod="openstack/dnsmasq-dns-666b6646f7-f7kxb" Dec 08 21:40:39 crc kubenswrapper[4791]: I1208 21:40:39.902742 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rlt27\" (UniqueName: 
\"kubernetes.io/projected/da534a4d-ccf5-4297-9a1e-5f3b2ad136f5-kube-api-access-rlt27\") pod \"dnsmasq-dns-666b6646f7-f7kxb\" (UID: \"da534a4d-ccf5-4297-9a1e-5f3b2ad136f5\") " pod="openstack/dnsmasq-dns-666b6646f7-f7kxb" Dec 08 21:40:40 crc kubenswrapper[4791]: I1208 21:40:40.004301 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-f7kxb" Dec 08 21:40:40 crc kubenswrapper[4791]: I1208 21:40:40.029569 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-lkjjh"] Dec 08 21:40:40 crc kubenswrapper[4791]: I1208 21:40:40.071020 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-56ffw"] Dec 08 21:40:40 crc kubenswrapper[4791]: I1208 21:40:40.072642 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-56ffw" Dec 08 21:40:40 crc kubenswrapper[4791]: I1208 21:40:40.088973 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-56ffw"] Dec 08 21:40:40 crc kubenswrapper[4791]: I1208 21:40:40.192411 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/07d0660f-c281-4624-9f12-f524fe2a8092-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-56ffw\" (UID: \"07d0660f-c281-4624-9f12-f524fe2a8092\") " pod="openstack/dnsmasq-dns-57d769cc4f-56ffw" Dec 08 21:40:40 crc kubenswrapper[4791]: I1208 21:40:40.192772 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kf82z\" (UniqueName: \"kubernetes.io/projected/07d0660f-c281-4624-9f12-f524fe2a8092-kube-api-access-kf82z\") pod \"dnsmasq-dns-57d769cc4f-56ffw\" (UID: \"07d0660f-c281-4624-9f12-f524fe2a8092\") " pod="openstack/dnsmasq-dns-57d769cc4f-56ffw" Dec 08 21:40:40 crc kubenswrapper[4791]: I1208 21:40:40.192808 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07d0660f-c281-4624-9f12-f524fe2a8092-config\") pod \"dnsmasq-dns-57d769cc4f-56ffw\" (UID: \"07d0660f-c281-4624-9f12-f524fe2a8092\") " pod="openstack/dnsmasq-dns-57d769cc4f-56ffw" Dec 08 21:40:40 crc kubenswrapper[4791]: I1208 21:40:40.294908 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07d0660f-c281-4624-9f12-f524fe2a8092-config\") pod \"dnsmasq-dns-57d769cc4f-56ffw\" (UID: \"07d0660f-c281-4624-9f12-f524fe2a8092\") " pod="openstack/dnsmasq-dns-57d769cc4f-56ffw" Dec 08 21:40:40 crc kubenswrapper[4791]: I1208 21:40:40.295126 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/07d0660f-c281-4624-9f12-f524fe2a8092-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-56ffw\" (UID: \"07d0660f-c281-4624-9f12-f524fe2a8092\") " pod="openstack/dnsmasq-dns-57d769cc4f-56ffw" Dec 08 21:40:40 crc kubenswrapper[4791]: I1208 21:40:40.295183 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kf82z\" (UniqueName: \"kubernetes.io/projected/07d0660f-c281-4624-9f12-f524fe2a8092-kube-api-access-kf82z\") pod \"dnsmasq-dns-57d769cc4f-56ffw\" (UID: \"07d0660f-c281-4624-9f12-f524fe2a8092\") " pod="openstack/dnsmasq-dns-57d769cc4f-56ffw" Dec 08 21:40:40 crc kubenswrapper[4791]: I1208 21:40:40.295969 4791 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/07d0660f-c281-4624-9f12-f524fe2a8092-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-56ffw\" (UID: \"07d0660f-c281-4624-9f12-f524fe2a8092\") " pod="openstack/dnsmasq-dns-57d769cc4f-56ffw" Dec 08 21:40:40 crc kubenswrapper[4791]: I1208 21:40:40.297284 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07d0660f-c281-4624-9f12-f524fe2a8092-config\") pod \"dnsmasq-dns-57d769cc4f-56ffw\" (UID: \"07d0660f-c281-4624-9f12-f524fe2a8092\") " pod="openstack/dnsmasq-dns-57d769cc4f-56ffw" Dec 08 21:40:40 crc kubenswrapper[4791]: I1208 21:40:40.345738 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kf82z\" (UniqueName: \"kubernetes.io/projected/07d0660f-c281-4624-9f12-f524fe2a8092-kube-api-access-kf82z\") pod \"dnsmasq-dns-57d769cc4f-56ffw\" (UID: \"07d0660f-c281-4624-9f12-f524fe2a8092\") " pod="openstack/dnsmasq-dns-57d769cc4f-56ffw" Dec 08 21:40:40 crc kubenswrapper[4791]: I1208 21:40:40.509893 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-56ffw" Dec 08 21:40:40 crc kubenswrapper[4791]: I1208 21:40:40.677415 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-f7kxb"] Dec 08 21:40:40 crc kubenswrapper[4791]: I1208 21:40:40.832196 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 08 21:40:40 crc kubenswrapper[4791]: I1208 21:40:40.834554 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 08 21:40:40 crc kubenswrapper[4791]: I1208 21:40:40.838426 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 08 21:40:40 crc kubenswrapper[4791]: I1208 21:40:40.838887 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-fhbnc" Dec 08 21:40:40 crc kubenswrapper[4791]: I1208 21:40:40.838972 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 08 21:40:40 crc kubenswrapper[4791]: I1208 21:40:40.839001 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 08 21:40:40 crc kubenswrapper[4791]: I1208 21:40:40.839064 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 08 21:40:40 crc kubenswrapper[4791]: I1208 21:40:40.838924 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 08 21:40:40 crc kubenswrapper[4791]: I1208 21:40:40.838934 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 08 21:40:40 crc kubenswrapper[4791]: I1208 21:40:40.851288 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 08 21:40:40 crc kubenswrapper[4791]: I1208 21:40:40.864695 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-1"] Dec 08 21:40:40 crc kubenswrapper[4791]: I1208 21:40:40.867073 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-1" Dec 08 21:40:40 crc kubenswrapper[4791]: I1208 21:40:40.872150 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-2"] Dec 08 21:40:40 crc kubenswrapper[4791]: I1208 21:40:40.875441 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-2" Dec 08 21:40:40 crc kubenswrapper[4791]: I1208 21:40:40.878818 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-1"] Dec 08 21:40:40 crc kubenswrapper[4791]: I1208 21:40:40.887876 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-2"] Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.017916 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6101a045-4b01-484e-a65b-4c406e458ea1-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-1\" (UID: \"6101a045-4b01-484e-a65b-4c406e458ea1\") " pod="openstack/rabbitmq-server-1" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.017999 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/87530e07-a720-4b5f-bd6f-c3f8bb540453-server-conf\") pod \"rabbitmq-server-2\" (UID: \"87530e07-a720-4b5f-bd6f-c3f8bb540453\") " pod="openstack/rabbitmq-server-2" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.018027 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6101a045-4b01-484e-a65b-4c406e458ea1-plugins-conf\") pod \"rabbitmq-server-1\" (UID: \"6101a045-4b01-484e-a65b-4c406e458ea1\") " pod="openstack/rabbitmq-server-1" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.018051 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9\") " pod="openstack/rabbitmq-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.018078 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9\") " pod="openstack/rabbitmq-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.018107 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6101a045-4b01-484e-a65b-4c406e458ea1-server-conf\") pod \"rabbitmq-server-1\" (UID: \"6101a045-4b01-484e-a65b-4c406e458ea1\") " pod="openstack/rabbitmq-server-1" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.018131 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/87530e07-a720-4b5f-bd6f-c3f8bb540453-rabbitmq-tls\") pod \"rabbitmq-server-2\" (UID: \"87530e07-a720-4b5f-bd6f-c3f8bb540453\") " pod="openstack/rabbitmq-server-2" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.018149 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6101a045-4b01-484e-a65b-4c406e458ea1-rabbitmq-confd\") pod \"rabbitmq-server-1\" (UID: \"6101a045-4b01-484e-a65b-4c406e458ea1\") " pod="openstack/rabbitmq-server-1" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.018177 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9-server-conf\") pod \"rabbitmq-server-0\" (UID: \"45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9\") " pod="openstack/rabbitmq-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.018222 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/87530e07-a720-4b5f-bd6f-c3f8bb540453-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-2\" (UID: \"87530e07-a720-4b5f-bd6f-c3f8bb540453\") " pod="openstack/rabbitmq-server-2" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.018248 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/87530e07-a720-4b5f-bd6f-c3f8bb540453-config-data\") pod \"rabbitmq-server-2\" (UID: \"87530e07-a720-4b5f-bd6f-c3f8bb540453\") " pod="openstack/rabbitmq-server-2" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.018273 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/87530e07-a720-4b5f-bd6f-c3f8bb540453-rabbitmq-confd\") pod \"rabbitmq-server-2\" (UID: \"87530e07-a720-4b5f-bd6f-c3f8bb540453\") " pod="openstack/rabbitmq-server-2" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.018298 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2fxp2\" (UniqueName: \"kubernetes.io/projected/45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9-kube-api-access-2fxp2\") pod \"rabbitmq-server-0\" (UID: \"45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9\") " pod="openstack/rabbitmq-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.018329 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/87530e07-a720-4b5f-bd6f-c3f8bb540453-plugins-conf\") pod \"rabbitmq-server-2\" (UID: \"87530e07-a720-4b5f-bd6f-c3f8bb540453\") " pod="openstack/rabbitmq-server-2" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.018357 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-47r7k\" (UniqueName: \"kubernetes.io/projected/6101a045-4b01-484e-a65b-4c406e458ea1-kube-api-access-47r7k\") pod \"rabbitmq-server-1\" (UID: \"6101a045-4b01-484e-a65b-4c406e458ea1\") " pod="openstack/rabbitmq-server-1" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.018384 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9\") " pod="openstack/rabbitmq-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.018406 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-llxwc\" (UniqueName: 
\"kubernetes.io/projected/87530e07-a720-4b5f-bd6f-c3f8bb540453-kube-api-access-llxwc\") pod \"rabbitmq-server-2\" (UID: \"87530e07-a720-4b5f-bd6f-c3f8bb540453\") " pod="openstack/rabbitmq-server-2" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.018428 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9\") " pod="openstack/rabbitmq-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.018451 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6101a045-4b01-484e-a65b-4c406e458ea1-erlang-cookie-secret\") pod \"rabbitmq-server-1\" (UID: \"6101a045-4b01-484e-a65b-4c406e458ea1\") " pod="openstack/rabbitmq-server-1" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.018478 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-dee185b4-088c-4fc4-a01e-31cfe0915ec7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dee185b4-088c-4fc4-a01e-31cfe0915ec7\") pod \"rabbitmq-server-1\" (UID: \"6101a045-4b01-484e-a65b-4c406e458ea1\") " pod="openstack/rabbitmq-server-1" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.018497 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6101a045-4b01-484e-a65b-4c406e458ea1-rabbitmq-tls\") pod \"rabbitmq-server-1\" (UID: \"6101a045-4b01-484e-a65b-4c406e458ea1\") " pod="openstack/rabbitmq-server-1" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.018519 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9-pod-info\") pod \"rabbitmq-server-0\" (UID: \"45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9\") " pod="openstack/rabbitmq-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.020100 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/87530e07-a720-4b5f-bd6f-c3f8bb540453-pod-info\") pod \"rabbitmq-server-2\" (UID: \"87530e07-a720-4b5f-bd6f-c3f8bb540453\") " pod="openstack/rabbitmq-server-2" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.020331 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6101a045-4b01-484e-a65b-4c406e458ea1-pod-info\") pod \"rabbitmq-server-1\" (UID: \"6101a045-4b01-484e-a65b-4c406e458ea1\") " pod="openstack/rabbitmq-server-1" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.020376 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-de58f584-d17f-4092-953f-93f5e59a8c79\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-de58f584-d17f-4092-953f-93f5e59a8c79\") pod \"rabbitmq-server-0\" (UID: \"45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9\") " pod="openstack/rabbitmq-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.020409 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/6101a045-4b01-484e-a65b-4c406e458ea1-rabbitmq-plugins\") pod \"rabbitmq-server-1\" (UID: \"6101a045-4b01-484e-a65b-4c406e458ea1\") " pod="openstack/rabbitmq-server-1" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.020473 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9\") " pod="openstack/rabbitmq-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.020501 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6101a045-4b01-484e-a65b-4c406e458ea1-config-data\") pod \"rabbitmq-server-1\" (UID: \"6101a045-4b01-484e-a65b-4c406e458ea1\") " pod="openstack/rabbitmq-server-1" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.020541 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9-config-data\") pod \"rabbitmq-server-0\" (UID: \"45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9\") " pod="openstack/rabbitmq-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.020569 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/87530e07-a720-4b5f-bd6f-c3f8bb540453-erlang-cookie-secret\") pod \"rabbitmq-server-2\" (UID: \"87530e07-a720-4b5f-bd6f-c3f8bb540453\") " pod="openstack/rabbitmq-server-2" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.020613 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-4b4bc2a1-d1af-4f90-b06e-541b370584ce\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4b4bc2a1-d1af-4f90-b06e-541b370584ce\") pod \"rabbitmq-server-2\" (UID: \"87530e07-a720-4b5f-bd6f-c3f8bb540453\") " pod="openstack/rabbitmq-server-2" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.020651 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/87530e07-a720-4b5f-bd6f-c3f8bb540453-rabbitmq-plugins\") pod \"rabbitmq-server-2\" (UID: \"87530e07-a720-4b5f-bd6f-c3f8bb540453\") " pod="openstack/rabbitmq-server-2" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.020725 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9\") " pod="openstack/rabbitmq-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.099445 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-f7kxb" event={"ID":"da534a4d-ccf5-4297-9a1e-5f3b2ad136f5","Type":"ContainerStarted","Data":"063083cd026427a6ecbe95ffde8ce474e823ed62bd1e9b6ef3b9c10171f039ac"} Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.126103 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/87530e07-a720-4b5f-bd6f-c3f8bb540453-server-conf\") pod \"rabbitmq-server-2\" (UID: 
\"87530e07-a720-4b5f-bd6f-c3f8bb540453\") " pod="openstack/rabbitmq-server-2" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.126154 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6101a045-4b01-484e-a65b-4c406e458ea1-plugins-conf\") pod \"rabbitmq-server-1\" (UID: \"6101a045-4b01-484e-a65b-4c406e458ea1\") " pod="openstack/rabbitmq-server-1" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.126183 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9\") " pod="openstack/rabbitmq-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.126209 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9\") " pod="openstack/rabbitmq-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.126229 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6101a045-4b01-484e-a65b-4c406e458ea1-server-conf\") pod \"rabbitmq-server-1\" (UID: \"6101a045-4b01-484e-a65b-4c406e458ea1\") " pod="openstack/rabbitmq-server-1" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.126246 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/87530e07-a720-4b5f-bd6f-c3f8bb540453-rabbitmq-tls\") pod \"rabbitmq-server-2\" (UID: \"87530e07-a720-4b5f-bd6f-c3f8bb540453\") " pod="openstack/rabbitmq-server-2" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.126262 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6101a045-4b01-484e-a65b-4c406e458ea1-rabbitmq-confd\") pod \"rabbitmq-server-1\" (UID: \"6101a045-4b01-484e-a65b-4c406e458ea1\") " pod="openstack/rabbitmq-server-1" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.126284 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9-server-conf\") pod \"rabbitmq-server-0\" (UID: \"45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9\") " pod="openstack/rabbitmq-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.126322 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/87530e07-a720-4b5f-bd6f-c3f8bb540453-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-2\" (UID: \"87530e07-a720-4b5f-bd6f-c3f8bb540453\") " pod="openstack/rabbitmq-server-2" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.126343 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/87530e07-a720-4b5f-bd6f-c3f8bb540453-config-data\") pod \"rabbitmq-server-2\" (UID: \"87530e07-a720-4b5f-bd6f-c3f8bb540453\") " pod="openstack/rabbitmq-server-2" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.126360 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" 
(UniqueName: \"kubernetes.io/projected/87530e07-a720-4b5f-bd6f-c3f8bb540453-rabbitmq-confd\") pod \"rabbitmq-server-2\" (UID: \"87530e07-a720-4b5f-bd6f-c3f8bb540453\") " pod="openstack/rabbitmq-server-2" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.126380 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2fxp2\" (UniqueName: \"kubernetes.io/projected/45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9-kube-api-access-2fxp2\") pod \"rabbitmq-server-0\" (UID: \"45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9\") " pod="openstack/rabbitmq-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.126403 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/87530e07-a720-4b5f-bd6f-c3f8bb540453-plugins-conf\") pod \"rabbitmq-server-2\" (UID: \"87530e07-a720-4b5f-bd6f-c3f8bb540453\") " pod="openstack/rabbitmq-server-2" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.126424 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-47r7k\" (UniqueName: \"kubernetes.io/projected/6101a045-4b01-484e-a65b-4c406e458ea1-kube-api-access-47r7k\") pod \"rabbitmq-server-1\" (UID: \"6101a045-4b01-484e-a65b-4c406e458ea1\") " pod="openstack/rabbitmq-server-1" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.126447 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9\") " pod="openstack/rabbitmq-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.126476 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-llxwc\" (UniqueName: \"kubernetes.io/projected/87530e07-a720-4b5f-bd6f-c3f8bb540453-kube-api-access-llxwc\") pod \"rabbitmq-server-2\" (UID: \"87530e07-a720-4b5f-bd6f-c3f8bb540453\") " pod="openstack/rabbitmq-server-2" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.126495 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9\") " pod="openstack/rabbitmq-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.126515 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6101a045-4b01-484e-a65b-4c406e458ea1-erlang-cookie-secret\") pod \"rabbitmq-server-1\" (UID: \"6101a045-4b01-484e-a65b-4c406e458ea1\") " pod="openstack/rabbitmq-server-1" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.126537 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-dee185b4-088c-4fc4-a01e-31cfe0915ec7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dee185b4-088c-4fc4-a01e-31cfe0915ec7\") pod \"rabbitmq-server-1\" (UID: \"6101a045-4b01-484e-a65b-4c406e458ea1\") " pod="openstack/rabbitmq-server-1" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.126555 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6101a045-4b01-484e-a65b-4c406e458ea1-rabbitmq-tls\") pod \"rabbitmq-server-1\" (UID: 
\"6101a045-4b01-484e-a65b-4c406e458ea1\") " pod="openstack/rabbitmq-server-1" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.126573 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9-pod-info\") pod \"rabbitmq-server-0\" (UID: \"45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9\") " pod="openstack/rabbitmq-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.126595 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/87530e07-a720-4b5f-bd6f-c3f8bb540453-pod-info\") pod \"rabbitmq-server-2\" (UID: \"87530e07-a720-4b5f-bd6f-c3f8bb540453\") " pod="openstack/rabbitmq-server-2" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.126628 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6101a045-4b01-484e-a65b-4c406e458ea1-pod-info\") pod \"rabbitmq-server-1\" (UID: \"6101a045-4b01-484e-a65b-4c406e458ea1\") " pod="openstack/rabbitmq-server-1" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.126650 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-de58f584-d17f-4092-953f-93f5e59a8c79\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-de58f584-d17f-4092-953f-93f5e59a8c79\") pod \"rabbitmq-server-0\" (UID: \"45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9\") " pod="openstack/rabbitmq-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.126673 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6101a045-4b01-484e-a65b-4c406e458ea1-rabbitmq-plugins\") pod \"rabbitmq-server-1\" (UID: \"6101a045-4b01-484e-a65b-4c406e458ea1\") " pod="openstack/rabbitmq-server-1" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.126766 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9\") " pod="openstack/rabbitmq-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.126793 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6101a045-4b01-484e-a65b-4c406e458ea1-config-data\") pod \"rabbitmq-server-1\" (UID: \"6101a045-4b01-484e-a65b-4c406e458ea1\") " pod="openstack/rabbitmq-server-1" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.126823 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9-config-data\") pod \"rabbitmq-server-0\" (UID: \"45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9\") " pod="openstack/rabbitmq-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.126842 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/87530e07-a720-4b5f-bd6f-c3f8bb540453-erlang-cookie-secret\") pod \"rabbitmq-server-2\" (UID: \"87530e07-a720-4b5f-bd6f-c3f8bb540453\") " pod="openstack/rabbitmq-server-2" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.126875 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"pvc-4b4bc2a1-d1af-4f90-b06e-541b370584ce\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4b4bc2a1-d1af-4f90-b06e-541b370584ce\") pod \"rabbitmq-server-2\" (UID: \"87530e07-a720-4b5f-bd6f-c3f8bb540453\") " pod="openstack/rabbitmq-server-2" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.126892 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/87530e07-a720-4b5f-bd6f-c3f8bb540453-rabbitmq-plugins\") pod \"rabbitmq-server-2\" (UID: \"87530e07-a720-4b5f-bd6f-c3f8bb540453\") " pod="openstack/rabbitmq-server-2" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.126923 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9\") " pod="openstack/rabbitmq-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.126957 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6101a045-4b01-484e-a65b-4c406e458ea1-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-1\" (UID: \"6101a045-4b01-484e-a65b-4c406e458ea1\") " pod="openstack/rabbitmq-server-1" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.127497 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6101a045-4b01-484e-a65b-4c406e458ea1-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-1\" (UID: \"6101a045-4b01-484e-a65b-4c406e458ea1\") " pod="openstack/rabbitmq-server-1" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.128583 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/87530e07-a720-4b5f-bd6f-c3f8bb540453-server-conf\") pod \"rabbitmq-server-2\" (UID: \"87530e07-a720-4b5f-bd6f-c3f8bb540453\") " pod="openstack/rabbitmq-server-2" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.129173 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6101a045-4b01-484e-a65b-4c406e458ea1-plugins-conf\") pod \"rabbitmq-server-1\" (UID: \"6101a045-4b01-484e-a65b-4c406e458ea1\") " pod="openstack/rabbitmq-server-1" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.129828 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9\") " pod="openstack/rabbitmq-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.130106 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9\") " pod="openstack/rabbitmq-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.130678 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6101a045-4b01-484e-a65b-4c406e458ea1-rabbitmq-plugins\") pod \"rabbitmq-server-1\" (UID: \"6101a045-4b01-484e-a65b-4c406e458ea1\") " 
pod="openstack/rabbitmq-server-1" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.131538 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/87530e07-a720-4b5f-bd6f-c3f8bb540453-rabbitmq-plugins\") pod \"rabbitmq-server-2\" (UID: \"87530e07-a720-4b5f-bd6f-c3f8bb540453\") " pod="openstack/rabbitmq-server-2" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.132111 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6101a045-4b01-484e-a65b-4c406e458ea1-config-data\") pod \"rabbitmq-server-1\" (UID: \"6101a045-4b01-484e-a65b-4c406e458ea1\") " pod="openstack/rabbitmq-server-1" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.137430 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6101a045-4b01-484e-a65b-4c406e458ea1-rabbitmq-tls\") pod \"rabbitmq-server-1\" (UID: \"6101a045-4b01-484e-a65b-4c406e458ea1\") " pod="openstack/rabbitmq-server-1" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.137688 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9\") " pod="openstack/rabbitmq-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.138608 4791 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.138639 4791 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-4b4bc2a1-d1af-4f90-b06e-541b370584ce\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4b4bc2a1-d1af-4f90-b06e-541b370584ce\") pod \"rabbitmq-server-2\" (UID: \"87530e07-a720-4b5f-bd6f-c3f8bb540453\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/eefdffdcedaf3f0cb08d747ba345442acf1c20a0979c9cc74971260d7bd1ccca/globalmount\"" pod="openstack/rabbitmq-server-2" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.138771 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/87530e07-a720-4b5f-bd6f-c3f8bb540453-config-data\") pod \"rabbitmq-server-2\" (UID: \"87530e07-a720-4b5f-bd6f-c3f8bb540453\") " pod="openstack/rabbitmq-server-2" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.139427 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9-config-data\") pod \"rabbitmq-server-0\" (UID: \"45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9\") " pod="openstack/rabbitmq-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.139741 4791 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.142595 4791 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-de58f584-d17f-4092-953f-93f5e59a8c79\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-de58f584-d17f-4092-953f-93f5e59a8c79\") pod \"rabbitmq-server-0\" (UID: \"45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/3fa536958a881f7ca87cb8cf50fbb6130f73bdb338ba14b280e917e473c2b7bc/globalmount\"" pod="openstack/rabbitmq-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.140752 4791 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.143307 4791 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-dee185b4-088c-4fc4-a01e-31cfe0915ec7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dee185b4-088c-4fc4-a01e-31cfe0915ec7\") pod \"rabbitmq-server-1\" (UID: \"6101a045-4b01-484e-a65b-4c406e458ea1\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/a9cbfae718f6452512f72dee7a8be70d65cc858c328982477d684a7be4c0b2ba/globalmount\"" pod="openstack/rabbitmq-server-1" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.144415 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/87530e07-a720-4b5f-bd6f-c3f8bb540453-plugins-conf\") pod \"rabbitmq-server-2\" (UID: \"87530e07-a720-4b5f-bd6f-c3f8bb540453\") " pod="openstack/rabbitmq-server-2" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.144594 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/87530e07-a720-4b5f-bd6f-c3f8bb540453-pod-info\") pod \"rabbitmq-server-2\" (UID: \"87530e07-a720-4b5f-bd6f-c3f8bb540453\") " pod="openstack/rabbitmq-server-2" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.143682 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6101a045-4b01-484e-a65b-4c406e458ea1-server-conf\") pod \"rabbitmq-server-1\" (UID: \"6101a045-4b01-484e-a65b-4c406e458ea1\") " pod="openstack/rabbitmq-server-1" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.145764 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9\") " pod="openstack/rabbitmq-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.146022 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9\") " pod="openstack/rabbitmq-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.146485 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9-server-conf\") pod \"rabbitmq-server-0\" (UID: \"45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9\") " pod="openstack/rabbitmq-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.146791 4791 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/87530e07-a720-4b5f-bd6f-c3f8bb540453-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-2\" (UID: \"87530e07-a720-4b5f-bd6f-c3f8bb540453\") " pod="openstack/rabbitmq-server-2" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.146841 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9\") " pod="openstack/rabbitmq-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.150082 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9-pod-info\") pod \"rabbitmq-server-0\" (UID: \"45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9\") " pod="openstack/rabbitmq-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.151062 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-56ffw"] Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.151456 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6101a045-4b01-484e-a65b-4c406e458ea1-erlang-cookie-secret\") pod \"rabbitmq-server-1\" (UID: \"6101a045-4b01-484e-a65b-4c406e458ea1\") " pod="openstack/rabbitmq-server-1" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.151466 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/87530e07-a720-4b5f-bd6f-c3f8bb540453-rabbitmq-tls\") pod \"rabbitmq-server-2\" (UID: \"87530e07-a720-4b5f-bd6f-c3f8bb540453\") " pod="openstack/rabbitmq-server-2" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.151908 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/87530e07-a720-4b5f-bd6f-c3f8bb540453-erlang-cookie-secret\") pod \"rabbitmq-server-2\" (UID: \"87530e07-a720-4b5f-bd6f-c3f8bb540453\") " pod="openstack/rabbitmq-server-2" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.159334 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/87530e07-a720-4b5f-bd6f-c3f8bb540453-rabbitmq-confd\") pod \"rabbitmq-server-2\" (UID: \"87530e07-a720-4b5f-bd6f-c3f8bb540453\") " pod="openstack/rabbitmq-server-2" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.159655 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6101a045-4b01-484e-a65b-4c406e458ea1-pod-info\") pod \"rabbitmq-server-1\" (UID: \"6101a045-4b01-484e-a65b-4c406e458ea1\") " pod="openstack/rabbitmq-server-1" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.161279 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6101a045-4b01-484e-a65b-4c406e458ea1-rabbitmq-confd\") pod \"rabbitmq-server-1\" (UID: \"6101a045-4b01-484e-a65b-4c406e458ea1\") " pod="openstack/rabbitmq-server-1" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.163663 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2fxp2\" (UniqueName: 
\"kubernetes.io/projected/45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9-kube-api-access-2fxp2\") pod \"rabbitmq-server-0\" (UID: \"45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9\") " pod="openstack/rabbitmq-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.167828 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-llxwc\" (UniqueName: \"kubernetes.io/projected/87530e07-a720-4b5f-bd6f-c3f8bb540453-kube-api-access-llxwc\") pod \"rabbitmq-server-2\" (UID: \"87530e07-a720-4b5f-bd6f-c3f8bb540453\") " pod="openstack/rabbitmq-server-2" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.168437 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-47r7k\" (UniqueName: \"kubernetes.io/projected/6101a045-4b01-484e-a65b-4c406e458ea1-kube-api-access-47r7k\") pod \"rabbitmq-server-1\" (UID: \"6101a045-4b01-484e-a65b-4c406e458ea1\") " pod="openstack/rabbitmq-server-1" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.195096 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-dee185b4-088c-4fc4-a01e-31cfe0915ec7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dee185b4-088c-4fc4-a01e-31cfe0915ec7\") pod \"rabbitmq-server-1\" (UID: \"6101a045-4b01-484e-a65b-4c406e458ea1\") " pod="openstack/rabbitmq-server-1" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.195784 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-de58f584-d17f-4092-953f-93f5e59a8c79\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-de58f584-d17f-4092-953f-93f5e59a8c79\") pod \"rabbitmq-server-0\" (UID: \"45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9\") " pod="openstack/rabbitmq-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.204951 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-1" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.208028 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-4b4bc2a1-d1af-4f90-b06e-541b370584ce\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4b4bc2a1-d1af-4f90-b06e-541b370584ce\") pod \"rabbitmq-server-2\" (UID: \"87530e07-a720-4b5f-bd6f-c3f8bb540453\") " pod="openstack/rabbitmq-server-2" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.224569 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-2" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.239015 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.240688 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.245940 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.246366 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.246486 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.246883 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.246931 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.247059 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.247244 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.247484 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-sdsph" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.337561 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d9cd6ba2-6502-43cf-8e48-36570ea8e831-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d9cd6ba2-6502-43cf-8e48-36570ea8e831\") " pod="openstack/rabbitmq-cell1-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.337940 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d9cd6ba2-6502-43cf-8e48-36570ea8e831-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"d9cd6ba2-6502-43cf-8e48-36570ea8e831\") " pod="openstack/rabbitmq-cell1-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.337961 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d9cd6ba2-6502-43cf-8e48-36570ea8e831-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"d9cd6ba2-6502-43cf-8e48-36570ea8e831\") " pod="openstack/rabbitmq-cell1-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.338018 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d9cd6ba2-6502-43cf-8e48-36570ea8e831-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"d9cd6ba2-6502-43cf-8e48-36570ea8e831\") " pod="openstack/rabbitmq-cell1-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.338055 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-f9bf3494-76c4-4bc3-b3e6-8dd95d2369bd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f9bf3494-76c4-4bc3-b3e6-8dd95d2369bd\") pod \"rabbitmq-cell1-server-0\" (UID: \"d9cd6ba2-6502-43cf-8e48-36570ea8e831\") " pod="openstack/rabbitmq-cell1-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.338179 4791 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2gmtc\" (UniqueName: \"kubernetes.io/projected/d9cd6ba2-6502-43cf-8e48-36570ea8e831-kube-api-access-2gmtc\") pod \"rabbitmq-cell1-server-0\" (UID: \"d9cd6ba2-6502-43cf-8e48-36570ea8e831\") " pod="openstack/rabbitmq-cell1-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.338228 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d9cd6ba2-6502-43cf-8e48-36570ea8e831-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"d9cd6ba2-6502-43cf-8e48-36570ea8e831\") " pod="openstack/rabbitmq-cell1-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.338258 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d9cd6ba2-6502-43cf-8e48-36570ea8e831-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d9cd6ba2-6502-43cf-8e48-36570ea8e831\") " pod="openstack/rabbitmq-cell1-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.338554 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d9cd6ba2-6502-43cf-8e48-36570ea8e831-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"d9cd6ba2-6502-43cf-8e48-36570ea8e831\") " pod="openstack/rabbitmq-cell1-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.338611 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d9cd6ba2-6502-43cf-8e48-36570ea8e831-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"d9cd6ba2-6502-43cf-8e48-36570ea8e831\") " pod="openstack/rabbitmq-cell1-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.338651 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d9cd6ba2-6502-43cf-8e48-36570ea8e831-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"d9cd6ba2-6502-43cf-8e48-36570ea8e831\") " pod="openstack/rabbitmq-cell1-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.441079 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d9cd6ba2-6502-43cf-8e48-36570ea8e831-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"d9cd6ba2-6502-43cf-8e48-36570ea8e831\") " pod="openstack/rabbitmq-cell1-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.441140 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-f9bf3494-76c4-4bc3-b3e6-8dd95d2369bd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f9bf3494-76c4-4bc3-b3e6-8dd95d2369bd\") pod \"rabbitmq-cell1-server-0\" (UID: \"d9cd6ba2-6502-43cf-8e48-36570ea8e831\") " pod="openstack/rabbitmq-cell1-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.441255 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2gmtc\" (UniqueName: \"kubernetes.io/projected/d9cd6ba2-6502-43cf-8e48-36570ea8e831-kube-api-access-2gmtc\") pod \"rabbitmq-cell1-server-0\" (UID: \"d9cd6ba2-6502-43cf-8e48-36570ea8e831\") " pod="openstack/rabbitmq-cell1-server-0" Dec 08 21:40:41 crc 
kubenswrapper[4791]: I1208 21:40:41.441284 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d9cd6ba2-6502-43cf-8e48-36570ea8e831-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"d9cd6ba2-6502-43cf-8e48-36570ea8e831\") " pod="openstack/rabbitmq-cell1-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.441311 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d9cd6ba2-6502-43cf-8e48-36570ea8e831-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d9cd6ba2-6502-43cf-8e48-36570ea8e831\") " pod="openstack/rabbitmq-cell1-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.441381 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d9cd6ba2-6502-43cf-8e48-36570ea8e831-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"d9cd6ba2-6502-43cf-8e48-36570ea8e831\") " pod="openstack/rabbitmq-cell1-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.441404 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d9cd6ba2-6502-43cf-8e48-36570ea8e831-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"d9cd6ba2-6502-43cf-8e48-36570ea8e831\") " pod="openstack/rabbitmq-cell1-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.441431 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d9cd6ba2-6502-43cf-8e48-36570ea8e831-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"d9cd6ba2-6502-43cf-8e48-36570ea8e831\") " pod="openstack/rabbitmq-cell1-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.441469 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d9cd6ba2-6502-43cf-8e48-36570ea8e831-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d9cd6ba2-6502-43cf-8e48-36570ea8e831\") " pod="openstack/rabbitmq-cell1-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.441499 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d9cd6ba2-6502-43cf-8e48-36570ea8e831-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"d9cd6ba2-6502-43cf-8e48-36570ea8e831\") " pod="openstack/rabbitmq-cell1-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.441522 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d9cd6ba2-6502-43cf-8e48-36570ea8e831-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"d9cd6ba2-6502-43cf-8e48-36570ea8e831\") " pod="openstack/rabbitmq-cell1-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.441935 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d9cd6ba2-6502-43cf-8e48-36570ea8e831-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"d9cd6ba2-6502-43cf-8e48-36570ea8e831\") " pod="openstack/rabbitmq-cell1-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.442149 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config-data\" (UniqueName: \"kubernetes.io/configmap/d9cd6ba2-6502-43cf-8e48-36570ea8e831-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"d9cd6ba2-6502-43cf-8e48-36570ea8e831\") " pod="openstack/rabbitmq-cell1-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.443368 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d9cd6ba2-6502-43cf-8e48-36570ea8e831-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d9cd6ba2-6502-43cf-8e48-36570ea8e831\") " pod="openstack/rabbitmq-cell1-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.445517 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d9cd6ba2-6502-43cf-8e48-36570ea8e831-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"d9cd6ba2-6502-43cf-8e48-36570ea8e831\") " pod="openstack/rabbitmq-cell1-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.446476 4791 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.446509 4791 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-f9bf3494-76c4-4bc3-b3e6-8dd95d2369bd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f9bf3494-76c4-4bc3-b3e6-8dd95d2369bd\") pod \"rabbitmq-cell1-server-0\" (UID: \"d9cd6ba2-6502-43cf-8e48-36570ea8e831\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/2d2ac122382902591a789da14f978a391777abad441d42f7b5a1a529ef270d45/globalmount\"" pod="openstack/rabbitmq-cell1-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.448818 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d9cd6ba2-6502-43cf-8e48-36570ea8e831-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"d9cd6ba2-6502-43cf-8e48-36570ea8e831\") " pod="openstack/rabbitmq-cell1-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.454281 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d9cd6ba2-6502-43cf-8e48-36570ea8e831-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d9cd6ba2-6502-43cf-8e48-36570ea8e831\") " pod="openstack/rabbitmq-cell1-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.456598 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d9cd6ba2-6502-43cf-8e48-36570ea8e831-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"d9cd6ba2-6502-43cf-8e48-36570ea8e831\") " pod="openstack/rabbitmq-cell1-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.457164 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d9cd6ba2-6502-43cf-8e48-36570ea8e831-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"d9cd6ba2-6502-43cf-8e48-36570ea8e831\") " pod="openstack/rabbitmq-cell1-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.464322 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d9cd6ba2-6502-43cf-8e48-36570ea8e831-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"d9cd6ba2-6502-43cf-8e48-36570ea8e831\") " pod="openstack/rabbitmq-cell1-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.464990 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.470261 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2gmtc\" (UniqueName: \"kubernetes.io/projected/d9cd6ba2-6502-43cf-8e48-36570ea8e831-kube-api-access-2gmtc\") pod \"rabbitmq-cell1-server-0\" (UID: \"d9cd6ba2-6502-43cf-8e48-36570ea8e831\") " pod="openstack/rabbitmq-cell1-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.553300 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-f9bf3494-76c4-4bc3-b3e6-8dd95d2369bd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f9bf3494-76c4-4bc3-b3e6-8dd95d2369bd\") pod \"rabbitmq-cell1-server-0\" (UID: \"d9cd6ba2-6502-43cf-8e48-36570ea8e831\") " pod="openstack/rabbitmq-cell1-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.586428 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 08 21:40:41 crc kubenswrapper[4791]: I1208 21:40:41.968148 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-1"] Dec 08 21:40:42 crc kubenswrapper[4791]: I1208 21:40:42.115028 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-1" event={"ID":"6101a045-4b01-484e-a65b-4c406e458ea1","Type":"ContainerStarted","Data":"6e0d3d6dc2cf8335535d473a0e0352c90a2c3358bdc950a4fa7c3c81eac7160a"} Dec 08 21:40:42 crc kubenswrapper[4791]: I1208 21:40:42.118791 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-56ffw" event={"ID":"07d0660f-c281-4624-9f12-f524fe2a8092","Type":"ContainerStarted","Data":"6567160b1cce2081238a19664dd2e271a7131495cc20449589c5ea74ceafede5"} Dec 08 21:40:42 crc kubenswrapper[4791]: I1208 21:40:42.339686 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-2"] Dec 08 21:40:42 crc kubenswrapper[4791]: I1208 21:40:42.483457 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 08 21:40:42 crc kubenswrapper[4791]: I1208 21:40:42.494014 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 08 21:40:42 crc kubenswrapper[4791]: I1208 21:40:42.670779 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Dec 08 21:40:42 crc kubenswrapper[4791]: I1208 21:40:42.672856 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Dec 08 21:40:42 crc kubenswrapper[4791]: I1208 21:40:42.690601 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Dec 08 21:40:42 crc kubenswrapper[4791]: I1208 21:40:42.691961 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-qfgzk" Dec 08 21:40:42 crc kubenswrapper[4791]: I1208 21:40:42.692181 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Dec 08 21:40:42 crc kubenswrapper[4791]: I1208 21:40:42.693424 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Dec 08 21:40:42 crc kubenswrapper[4791]: I1208 21:40:42.709806 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Dec 08 21:40:42 crc kubenswrapper[4791]: I1208 21:40:42.711861 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 08 21:40:42 crc kubenswrapper[4791]: I1208 21:40:42.789410 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0eeeba8c-9e5c-4701-9941-8a324604a18b-operator-scripts\") pod \"openstack-galera-0\" (UID: \"0eeeba8c-9e5c-4701-9941-8a324604a18b\") " pod="openstack/openstack-galera-0" Dec 08 21:40:42 crc kubenswrapper[4791]: I1208 21:40:42.789702 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/0eeeba8c-9e5c-4701-9941-8a324604a18b-config-data-generated\") pod \"openstack-galera-0\" (UID: \"0eeeba8c-9e5c-4701-9941-8a324604a18b\") " pod="openstack/openstack-galera-0" Dec 08 21:40:42 crc kubenswrapper[4791]: I1208 21:40:42.789796 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/0eeeba8c-9e5c-4701-9941-8a324604a18b-kolla-config\") pod \"openstack-galera-0\" (UID: \"0eeeba8c-9e5c-4701-9941-8a324604a18b\") " pod="openstack/openstack-galera-0" Dec 08 21:40:42 crc kubenswrapper[4791]: I1208 21:40:42.789903 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/0eeeba8c-9e5c-4701-9941-8a324604a18b-config-data-default\") pod \"openstack-galera-0\" (UID: \"0eeeba8c-9e5c-4701-9941-8a324604a18b\") " pod="openstack/openstack-galera-0" Dec 08 21:40:42 crc kubenswrapper[4791]: I1208 21:40:42.789947 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/0eeeba8c-9e5c-4701-9941-8a324604a18b-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"0eeeba8c-9e5c-4701-9941-8a324604a18b\") " pod="openstack/openstack-galera-0" Dec 08 21:40:42 crc kubenswrapper[4791]: I1208 21:40:42.790014 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0eeeba8c-9e5c-4701-9941-8a324604a18b-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"0eeeba8c-9e5c-4701-9941-8a324604a18b\") " pod="openstack/openstack-galera-0" Dec 08 21:40:42 crc kubenswrapper[4791]: I1208 21:40:42.790044 4791 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-1152e74a-c83a-4801-b23a-f63525cf5e4c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1152e74a-c83a-4801-b23a-f63525cf5e4c\") pod \"openstack-galera-0\" (UID: \"0eeeba8c-9e5c-4701-9941-8a324604a18b\") " pod="openstack/openstack-galera-0" Dec 08 21:40:42 crc kubenswrapper[4791]: I1208 21:40:42.790070 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-47f9c\" (UniqueName: \"kubernetes.io/projected/0eeeba8c-9e5c-4701-9941-8a324604a18b-kube-api-access-47f9c\") pod \"openstack-galera-0\" (UID: \"0eeeba8c-9e5c-4701-9941-8a324604a18b\") " pod="openstack/openstack-galera-0" Dec 08 21:40:42 crc kubenswrapper[4791]: I1208 21:40:42.892968 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-1152e74a-c83a-4801-b23a-f63525cf5e4c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1152e74a-c83a-4801-b23a-f63525cf5e4c\") pod \"openstack-galera-0\" (UID: \"0eeeba8c-9e5c-4701-9941-8a324604a18b\") " pod="openstack/openstack-galera-0" Dec 08 21:40:42 crc kubenswrapper[4791]: I1208 21:40:42.893033 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-47f9c\" (UniqueName: \"kubernetes.io/projected/0eeeba8c-9e5c-4701-9941-8a324604a18b-kube-api-access-47f9c\") pod \"openstack-galera-0\" (UID: \"0eeeba8c-9e5c-4701-9941-8a324604a18b\") " pod="openstack/openstack-galera-0" Dec 08 21:40:42 crc kubenswrapper[4791]: I1208 21:40:42.893067 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0eeeba8c-9e5c-4701-9941-8a324604a18b-operator-scripts\") pod \"openstack-galera-0\" (UID: \"0eeeba8c-9e5c-4701-9941-8a324604a18b\") " pod="openstack/openstack-galera-0" Dec 08 21:40:42 crc kubenswrapper[4791]: I1208 21:40:42.893171 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/0eeeba8c-9e5c-4701-9941-8a324604a18b-config-data-generated\") pod \"openstack-galera-0\" (UID: \"0eeeba8c-9e5c-4701-9941-8a324604a18b\") " pod="openstack/openstack-galera-0" Dec 08 21:40:42 crc kubenswrapper[4791]: I1208 21:40:42.893208 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/0eeeba8c-9e5c-4701-9941-8a324604a18b-kolla-config\") pod \"openstack-galera-0\" (UID: \"0eeeba8c-9e5c-4701-9941-8a324604a18b\") " pod="openstack/openstack-galera-0" Dec 08 21:40:42 crc kubenswrapper[4791]: I1208 21:40:42.893898 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/0eeeba8c-9e5c-4701-9941-8a324604a18b-config-data-generated\") pod \"openstack-galera-0\" (UID: \"0eeeba8c-9e5c-4701-9941-8a324604a18b\") " pod="openstack/openstack-galera-0" Dec 08 21:40:42 crc kubenswrapper[4791]: I1208 21:40:42.894241 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/0eeeba8c-9e5c-4701-9941-8a324604a18b-kolla-config\") pod \"openstack-galera-0\" (UID: \"0eeeba8c-9e5c-4701-9941-8a324604a18b\") " pod="openstack/openstack-galera-0" Dec 08 21:40:42 crc kubenswrapper[4791]: I1208 21:40:42.894955 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" 
(UniqueName: \"kubernetes.io/configmap/0eeeba8c-9e5c-4701-9941-8a324604a18b-config-data-default\") pod \"openstack-galera-0\" (UID: \"0eeeba8c-9e5c-4701-9941-8a324604a18b\") " pod="openstack/openstack-galera-0" Dec 08 21:40:42 crc kubenswrapper[4791]: I1208 21:40:42.895036 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/0eeeba8c-9e5c-4701-9941-8a324604a18b-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"0eeeba8c-9e5c-4701-9941-8a324604a18b\") " pod="openstack/openstack-galera-0" Dec 08 21:40:42 crc kubenswrapper[4791]: I1208 21:40:42.895173 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0eeeba8c-9e5c-4701-9941-8a324604a18b-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"0eeeba8c-9e5c-4701-9941-8a324604a18b\") " pod="openstack/openstack-galera-0" Dec 08 21:40:42 crc kubenswrapper[4791]: I1208 21:40:42.895850 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/0eeeba8c-9e5c-4701-9941-8a324604a18b-config-data-default\") pod \"openstack-galera-0\" (UID: \"0eeeba8c-9e5c-4701-9941-8a324604a18b\") " pod="openstack/openstack-galera-0" Dec 08 21:40:42 crc kubenswrapper[4791]: I1208 21:40:42.897486 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0eeeba8c-9e5c-4701-9941-8a324604a18b-operator-scripts\") pod \"openstack-galera-0\" (UID: \"0eeeba8c-9e5c-4701-9941-8a324604a18b\") " pod="openstack/openstack-galera-0" Dec 08 21:40:42 crc kubenswrapper[4791]: I1208 21:40:42.903845 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0eeeba8c-9e5c-4701-9941-8a324604a18b-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"0eeeba8c-9e5c-4701-9941-8a324604a18b\") " pod="openstack/openstack-galera-0" Dec 08 21:40:42 crc kubenswrapper[4791]: I1208 21:40:42.911657 4791 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 08 21:40:42 crc kubenswrapper[4791]: I1208 21:40:42.911696 4791 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-1152e74a-c83a-4801-b23a-f63525cf5e4c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1152e74a-c83a-4801-b23a-f63525cf5e4c\") pod \"openstack-galera-0\" (UID: \"0eeeba8c-9e5c-4701-9941-8a324604a18b\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/e4f27d709d591e4e9bb1e3f80d49892c0de9bb1291aacdac51bba24949431c02/globalmount\"" pod="openstack/openstack-galera-0" Dec 08 21:40:42 crc kubenswrapper[4791]: I1208 21:40:42.913373 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/0eeeba8c-9e5c-4701-9941-8a324604a18b-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"0eeeba8c-9e5c-4701-9941-8a324604a18b\") " pod="openstack/openstack-galera-0" Dec 08 21:40:42 crc kubenswrapper[4791]: I1208 21:40:42.917278 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-47f9c\" (UniqueName: \"kubernetes.io/projected/0eeeba8c-9e5c-4701-9941-8a324604a18b-kube-api-access-47f9c\") pod \"openstack-galera-0\" (UID: \"0eeeba8c-9e5c-4701-9941-8a324604a18b\") " pod="openstack/openstack-galera-0" Dec 08 21:40:42 crc kubenswrapper[4791]: I1208 21:40:42.974902 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-1152e74a-c83a-4801-b23a-f63525cf5e4c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1152e74a-c83a-4801-b23a-f63525cf5e4c\") pod \"openstack-galera-0\" (UID: \"0eeeba8c-9e5c-4701-9941-8a324604a18b\") " pod="openstack/openstack-galera-0" Dec 08 21:40:43 crc kubenswrapper[4791]: I1208 21:40:43.007828 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Dec 08 21:40:43 crc kubenswrapper[4791]: I1208 21:40:43.162881 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-2" event={"ID":"87530e07-a720-4b5f-bd6f-c3f8bb540453","Type":"ContainerStarted","Data":"402e84163a8beee761b270ced7b05883bac8d4a17a8392dd79ff97f85c02eb11"} Dec 08 21:40:43 crc kubenswrapper[4791]: I1208 21:40:43.165781 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9","Type":"ContainerStarted","Data":"8c9ad8171c235155ab37446d9375d600e325f8ff892f2c413c2747febc4b2e51"} Dec 08 21:40:43 crc kubenswrapper[4791]: I1208 21:40:43.174726 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"d9cd6ba2-6502-43cf-8e48-36570ea8e831","Type":"ContainerStarted","Data":"e45036976ce166030c55e3c29d4dd621c506fd3e6893c3c11656d39d3bbe0372"} Dec 08 21:40:43 crc kubenswrapper[4791]: I1208 21:40:43.951189 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.202631 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"0eeeba8c-9e5c-4701-9941-8a324604a18b","Type":"ContainerStarted","Data":"23ba3a2b4afba49fe6d206b7e57172d8c88a16e62b7a77d632ff1ed4bf9335e1"} Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.219943 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.235605 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.235795 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.238639 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.238968 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-gc4m5" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.239127 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.239409 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.356653 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e46f211-8213-4219-9389-044888a87181-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"8e46f211-8213-4219-9389-044888a87181\") " pod="openstack/openstack-cell1-galera-0" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.356731 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/8e46f211-8213-4219-9389-044888a87181-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"8e46f211-8213-4219-9389-044888a87181\") " pod="openstack/openstack-cell1-galera-0" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.356815 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8e46f211-8213-4219-9389-044888a87181-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"8e46f211-8213-4219-9389-044888a87181\") " pod="openstack/openstack-cell1-galera-0" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.356872 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bt5z7\" (UniqueName: \"kubernetes.io/projected/8e46f211-8213-4219-9389-044888a87181-kube-api-access-bt5z7\") pod \"openstack-cell1-galera-0\" (UID: \"8e46f211-8213-4219-9389-044888a87181\") " pod="openstack/openstack-cell1-galera-0" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.356910 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/8e46f211-8213-4219-9389-044888a87181-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"8e46f211-8213-4219-9389-044888a87181\") " pod="openstack/openstack-cell1-galera-0" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.357114 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/8e46f211-8213-4219-9389-044888a87181-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"8e46f211-8213-4219-9389-044888a87181\") " pod="openstack/openstack-cell1-galera-0" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.357167 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-f167d5f8-a6f9-436e-9666-6656bcdc8ab5\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f167d5f8-a6f9-436e-9666-6656bcdc8ab5\") pod 
\"openstack-cell1-galera-0\" (UID: \"8e46f211-8213-4219-9389-044888a87181\") " pod="openstack/openstack-cell1-galera-0" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.357200 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8e46f211-8213-4219-9389-044888a87181-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"8e46f211-8213-4219-9389-044888a87181\") " pod="openstack/openstack-cell1-galera-0" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.466531 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/8e46f211-8213-4219-9389-044888a87181-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"8e46f211-8213-4219-9389-044888a87181\") " pod="openstack/openstack-cell1-galera-0" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.466783 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-f167d5f8-a6f9-436e-9666-6656bcdc8ab5\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f167d5f8-a6f9-436e-9666-6656bcdc8ab5\") pod \"openstack-cell1-galera-0\" (UID: \"8e46f211-8213-4219-9389-044888a87181\") " pod="openstack/openstack-cell1-galera-0" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.466895 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8e46f211-8213-4219-9389-044888a87181-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"8e46f211-8213-4219-9389-044888a87181\") " pod="openstack/openstack-cell1-galera-0" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.466934 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e46f211-8213-4219-9389-044888a87181-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"8e46f211-8213-4219-9389-044888a87181\") " pod="openstack/openstack-cell1-galera-0" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.466959 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/8e46f211-8213-4219-9389-044888a87181-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"8e46f211-8213-4219-9389-044888a87181\") " pod="openstack/openstack-cell1-galera-0" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.467128 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8e46f211-8213-4219-9389-044888a87181-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"8e46f211-8213-4219-9389-044888a87181\") " pod="openstack/openstack-cell1-galera-0" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.467227 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bt5z7\" (UniqueName: \"kubernetes.io/projected/8e46f211-8213-4219-9389-044888a87181-kube-api-access-bt5z7\") pod \"openstack-cell1-galera-0\" (UID: \"8e46f211-8213-4219-9389-044888a87181\") " pod="openstack/openstack-cell1-galera-0" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.467300 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/8e46f211-8213-4219-9389-044888a87181-config-data-generated\") pod 
\"openstack-cell1-galera-0\" (UID: \"8e46f211-8213-4219-9389-044888a87181\") " pod="openstack/openstack-cell1-galera-0" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.474232 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/8e46f211-8213-4219-9389-044888a87181-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"8e46f211-8213-4219-9389-044888a87181\") " pod="openstack/openstack-cell1-galera-0" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.474504 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8e46f211-8213-4219-9389-044888a87181-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"8e46f211-8213-4219-9389-044888a87181\") " pod="openstack/openstack-cell1-galera-0" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.475624 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8e46f211-8213-4219-9389-044888a87181-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"8e46f211-8213-4219-9389-044888a87181\") " pod="openstack/openstack-cell1-galera-0" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.475978 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/8e46f211-8213-4219-9389-044888a87181-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"8e46f211-8213-4219-9389-044888a87181\") " pod="openstack/openstack-cell1-galera-0" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.480819 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/8e46f211-8213-4219-9389-044888a87181-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"8e46f211-8213-4219-9389-044888a87181\") " pod="openstack/openstack-cell1-galera-0" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.483663 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e46f211-8213-4219-9389-044888a87181-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"8e46f211-8213-4219-9389-044888a87181\") " pod="openstack/openstack-cell1-galera-0" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.505691 4791 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.505791 4791 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-f167d5f8-a6f9-436e-9666-6656bcdc8ab5\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f167d5f8-a6f9-436e-9666-6656bcdc8ab5\") pod \"openstack-cell1-galera-0\" (UID: \"8e46f211-8213-4219-9389-044888a87181\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/dc29c301513d177e1bda32d6208a2dfa45b12d35bb64d230c85412a9c2f8c131/globalmount\"" pod="openstack/openstack-cell1-galera-0" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.513084 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bt5z7\" (UniqueName: \"kubernetes.io/projected/8e46f211-8213-4219-9389-044888a87181-kube-api-access-bt5z7\") pod \"openstack-cell1-galera-0\" (UID: \"8e46f211-8213-4219-9389-044888a87181\") " pod="openstack/openstack-cell1-galera-0" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.544039 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.551426 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.561379 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.561611 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.561888 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-txdr9" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.580525 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.689199 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-f167d5f8-a6f9-436e-9666-6656bcdc8ab5\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f167d5f8-a6f9-436e-9666-6656bcdc8ab5\") pod \"openstack-cell1-galera-0\" (UID: \"8e46f211-8213-4219-9389-044888a87181\") " pod="openstack/openstack-cell1-galera-0" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.690848 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k4gh4\" (UniqueName: \"kubernetes.io/projected/d1c662fa-6e9f-4127-af2f-059adea86bd4-kube-api-access-k4gh4\") pod \"memcached-0\" (UID: \"d1c662fa-6e9f-4127-af2f-059adea86bd4\") " pod="openstack/memcached-0" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.690897 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1c662fa-6e9f-4127-af2f-059adea86bd4-combined-ca-bundle\") pod \"memcached-0\" (UID: \"d1c662fa-6e9f-4127-af2f-059adea86bd4\") " pod="openstack/memcached-0" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.690947 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d1c662fa-6e9f-4127-af2f-059adea86bd4-config-data\") pod \"memcached-0\" (UID: \"d1c662fa-6e9f-4127-af2f-059adea86bd4\") " pod="openstack/memcached-0" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 
21:40:44.691046 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/d1c662fa-6e9f-4127-af2f-059adea86bd4-memcached-tls-certs\") pod \"memcached-0\" (UID: \"d1c662fa-6e9f-4127-af2f-059adea86bd4\") " pod="openstack/memcached-0" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.691173 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d1c662fa-6e9f-4127-af2f-059adea86bd4-kolla-config\") pod \"memcached-0\" (UID: \"d1c662fa-6e9f-4127-af2f-059adea86bd4\") " pod="openstack/memcached-0" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.793268 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/d1c662fa-6e9f-4127-af2f-059adea86bd4-memcached-tls-certs\") pod \"memcached-0\" (UID: \"d1c662fa-6e9f-4127-af2f-059adea86bd4\") " pod="openstack/memcached-0" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.793406 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d1c662fa-6e9f-4127-af2f-059adea86bd4-kolla-config\") pod \"memcached-0\" (UID: \"d1c662fa-6e9f-4127-af2f-059adea86bd4\") " pod="openstack/memcached-0" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.793483 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k4gh4\" (UniqueName: \"kubernetes.io/projected/d1c662fa-6e9f-4127-af2f-059adea86bd4-kube-api-access-k4gh4\") pod \"memcached-0\" (UID: \"d1c662fa-6e9f-4127-af2f-059adea86bd4\") " pod="openstack/memcached-0" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.793510 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1c662fa-6e9f-4127-af2f-059adea86bd4-combined-ca-bundle\") pod \"memcached-0\" (UID: \"d1c662fa-6e9f-4127-af2f-059adea86bd4\") " pod="openstack/memcached-0" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.793553 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d1c662fa-6e9f-4127-af2f-059adea86bd4-config-data\") pod \"memcached-0\" (UID: \"d1c662fa-6e9f-4127-af2f-059adea86bd4\") " pod="openstack/memcached-0" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.794610 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d1c662fa-6e9f-4127-af2f-059adea86bd4-config-data\") pod \"memcached-0\" (UID: \"d1c662fa-6e9f-4127-af2f-059adea86bd4\") " pod="openstack/memcached-0" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.796028 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d1c662fa-6e9f-4127-af2f-059adea86bd4-kolla-config\") pod \"memcached-0\" (UID: \"d1c662fa-6e9f-4127-af2f-059adea86bd4\") " pod="openstack/memcached-0" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.800442 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1c662fa-6e9f-4127-af2f-059adea86bd4-combined-ca-bundle\") pod \"memcached-0\" (UID: \"d1c662fa-6e9f-4127-af2f-059adea86bd4\") " pod="openstack/memcached-0" Dec 08 21:40:44 crc 
kubenswrapper[4791]: I1208 21:40:44.804798 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/d1c662fa-6e9f-4127-af2f-059adea86bd4-memcached-tls-certs\") pod \"memcached-0\" (UID: \"d1c662fa-6e9f-4127-af2f-059adea86bd4\") " pod="openstack/memcached-0" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.816356 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k4gh4\" (UniqueName: \"kubernetes.io/projected/d1c662fa-6e9f-4127-af2f-059adea86bd4-kube-api-access-k4gh4\") pod \"memcached-0\" (UID: \"d1c662fa-6e9f-4127-af2f-059adea86bd4\") " pod="openstack/memcached-0" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.867339 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 08 21:40:44 crc kubenswrapper[4791]: I1208 21:40:44.901277 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 08 21:40:45 crc kubenswrapper[4791]: I1208 21:40:45.837419 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 08 21:40:45 crc kubenswrapper[4791]: W1208 21:40:45.855194 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8e46f211_8213_4219_9389_044888a87181.slice/crio-54fc16624867c4c14016ec9350fa8619b1a744cd7b583dd8efa32d43d21825d0 WatchSource:0}: Error finding container 54fc16624867c4c14016ec9350fa8619b1a744cd7b583dd8efa32d43d21825d0: Status 404 returned error can't find the container with id 54fc16624867c4c14016ec9350fa8619b1a744cd7b583dd8efa32d43d21825d0 Dec 08 21:40:46 crc kubenswrapper[4791]: I1208 21:40:46.059907 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 08 21:40:46 crc kubenswrapper[4791]: W1208 21:40:46.084660 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd1c662fa_6e9f_4127_af2f_059adea86bd4.slice/crio-ab4f33023a2c4cbc6366afc4181c4c939c2dd295ac73acb1a227ee98daea02a3 WatchSource:0}: Error finding container ab4f33023a2c4cbc6366afc4181c4c939c2dd295ac73acb1a227ee98daea02a3: Status 404 returned error can't find the container with id ab4f33023a2c4cbc6366afc4181c4c939c2dd295ac73acb1a227ee98daea02a3 Dec 08 21:40:46 crc kubenswrapper[4791]: I1208 21:40:46.254393 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"8e46f211-8213-4219-9389-044888a87181","Type":"ContainerStarted","Data":"54fc16624867c4c14016ec9350fa8619b1a744cd7b583dd8efa32d43d21825d0"} Dec 08 21:40:46 crc kubenswrapper[4791]: I1208 21:40:46.258741 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"d1c662fa-6e9f-4127-af2f-059adea86bd4","Type":"ContainerStarted","Data":"ab4f33023a2c4cbc6366afc4181c4c939c2dd295ac73acb1a227ee98daea02a3"} Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.424139 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-g2zr4"] Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.427497 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-g2zr4" Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.430772 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.430788 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.430916 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-2bbzp" Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.435018 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-g2zr4"] Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.503427 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-9b4hh"] Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.505703 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-9b4hh" Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.508629 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ab866e7-9292-4d1c-b55e-6d29c9d23b05-combined-ca-bundle\") pod \"ovn-controller-g2zr4\" (UID: \"0ab866e7-9292-4d1c-b55e-6d29c9d23b05\") " pod="openstack/ovn-controller-g2zr4" Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.508745 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0ab866e7-9292-4d1c-b55e-6d29c9d23b05-var-run\") pod \"ovn-controller-g2zr4\" (UID: \"0ab866e7-9292-4d1c-b55e-6d29c9d23b05\") " pod="openstack/ovn-controller-g2zr4" Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.508852 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0ab866e7-9292-4d1c-b55e-6d29c9d23b05-var-run-ovn\") pod \"ovn-controller-g2zr4\" (UID: \"0ab866e7-9292-4d1c-b55e-6d29c9d23b05\") " pod="openstack/ovn-controller-g2zr4" Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.508949 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ab866e7-9292-4d1c-b55e-6d29c9d23b05-ovn-controller-tls-certs\") pod \"ovn-controller-g2zr4\" (UID: \"0ab866e7-9292-4d1c-b55e-6d29c9d23b05\") " pod="openstack/ovn-controller-g2zr4" Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.509216 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-whsn4\" (UniqueName: \"kubernetes.io/projected/0ab866e7-9292-4d1c-b55e-6d29c9d23b05-kube-api-access-whsn4\") pod \"ovn-controller-g2zr4\" (UID: \"0ab866e7-9292-4d1c-b55e-6d29c9d23b05\") " pod="openstack/ovn-controller-g2zr4" Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.509296 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0ab866e7-9292-4d1c-b55e-6d29c9d23b05-scripts\") pod \"ovn-controller-g2zr4\" (UID: \"0ab866e7-9292-4d1c-b55e-6d29c9d23b05\") " pod="openstack/ovn-controller-g2zr4" Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.509426 4791 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0ab866e7-9292-4d1c-b55e-6d29c9d23b05-var-log-ovn\") pod \"ovn-controller-g2zr4\" (UID: \"0ab866e7-9292-4d1c-b55e-6d29c9d23b05\") " pod="openstack/ovn-controller-g2zr4" Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.547674 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-9b4hh"] Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.611253 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d9b8addc-5f49-43c0-a4c3-23ed14252765-var-run\") pod \"ovn-controller-ovs-9b4hh\" (UID: \"d9b8addc-5f49-43c0-a4c3-23ed14252765\") " pod="openstack/ovn-controller-ovs-9b4hh" Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.611541 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-whsn4\" (UniqueName: \"kubernetes.io/projected/0ab866e7-9292-4d1c-b55e-6d29c9d23b05-kube-api-access-whsn4\") pod \"ovn-controller-g2zr4\" (UID: \"0ab866e7-9292-4d1c-b55e-6d29c9d23b05\") " pod="openstack/ovn-controller-g2zr4" Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.611649 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0ab866e7-9292-4d1c-b55e-6d29c9d23b05-scripts\") pod \"ovn-controller-g2zr4\" (UID: \"0ab866e7-9292-4d1c-b55e-6d29c9d23b05\") " pod="openstack/ovn-controller-g2zr4" Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.611757 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0ab866e7-9292-4d1c-b55e-6d29c9d23b05-var-log-ovn\") pod \"ovn-controller-g2zr4\" (UID: \"0ab866e7-9292-4d1c-b55e-6d29c9d23b05\") " pod="openstack/ovn-controller-g2zr4" Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.611831 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d9b8addc-5f49-43c0-a4c3-23ed14252765-scripts\") pod \"ovn-controller-ovs-9b4hh\" (UID: \"d9b8addc-5f49-43c0-a4c3-23ed14252765\") " pod="openstack/ovn-controller-ovs-9b4hh" Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.611917 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/d9b8addc-5f49-43c0-a4c3-23ed14252765-var-log\") pod \"ovn-controller-ovs-9b4hh\" (UID: \"d9b8addc-5f49-43c0-a4c3-23ed14252765\") " pod="openstack/ovn-controller-ovs-9b4hh" Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.611989 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/d9b8addc-5f49-43c0-a4c3-23ed14252765-etc-ovs\") pod \"ovn-controller-ovs-9b4hh\" (UID: \"d9b8addc-5f49-43c0-a4c3-23ed14252765\") " pod="openstack/ovn-controller-ovs-9b4hh" Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.612127 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/d9b8addc-5f49-43c0-a4c3-23ed14252765-var-lib\") pod \"ovn-controller-ovs-9b4hh\" (UID: \"d9b8addc-5f49-43c0-a4c3-23ed14252765\") " pod="openstack/ovn-controller-ovs-9b4hh" Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 
21:40:49.612207 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-58b26\" (UniqueName: \"kubernetes.io/projected/d9b8addc-5f49-43c0-a4c3-23ed14252765-kube-api-access-58b26\") pod \"ovn-controller-ovs-9b4hh\" (UID: \"d9b8addc-5f49-43c0-a4c3-23ed14252765\") " pod="openstack/ovn-controller-ovs-9b4hh" Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.612317 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ab866e7-9292-4d1c-b55e-6d29c9d23b05-combined-ca-bundle\") pod \"ovn-controller-g2zr4\" (UID: \"0ab866e7-9292-4d1c-b55e-6d29c9d23b05\") " pod="openstack/ovn-controller-g2zr4" Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.612386 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0ab866e7-9292-4d1c-b55e-6d29c9d23b05-var-run\") pod \"ovn-controller-g2zr4\" (UID: \"0ab866e7-9292-4d1c-b55e-6d29c9d23b05\") " pod="openstack/ovn-controller-g2zr4" Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.612455 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0ab866e7-9292-4d1c-b55e-6d29c9d23b05-var-run-ovn\") pod \"ovn-controller-g2zr4\" (UID: \"0ab866e7-9292-4d1c-b55e-6d29c9d23b05\") " pod="openstack/ovn-controller-g2zr4" Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.612546 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ab866e7-9292-4d1c-b55e-6d29c9d23b05-ovn-controller-tls-certs\") pod \"ovn-controller-g2zr4\" (UID: \"0ab866e7-9292-4d1c-b55e-6d29c9d23b05\") " pod="openstack/ovn-controller-g2zr4" Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.615362 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0ab866e7-9292-4d1c-b55e-6d29c9d23b05-var-log-ovn\") pod \"ovn-controller-g2zr4\" (UID: \"0ab866e7-9292-4d1c-b55e-6d29c9d23b05\") " pod="openstack/ovn-controller-g2zr4" Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.615493 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0ab866e7-9292-4d1c-b55e-6d29c9d23b05-var-run\") pod \"ovn-controller-g2zr4\" (UID: \"0ab866e7-9292-4d1c-b55e-6d29c9d23b05\") " pod="openstack/ovn-controller-g2zr4" Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.615601 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0ab866e7-9292-4d1c-b55e-6d29c9d23b05-var-run-ovn\") pod \"ovn-controller-g2zr4\" (UID: \"0ab866e7-9292-4d1c-b55e-6d29c9d23b05\") " pod="openstack/ovn-controller-g2zr4" Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.618435 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0ab866e7-9292-4d1c-b55e-6d29c9d23b05-scripts\") pod \"ovn-controller-g2zr4\" (UID: \"0ab866e7-9292-4d1c-b55e-6d29c9d23b05\") " pod="openstack/ovn-controller-g2zr4" Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.623742 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ab866e7-9292-4d1c-b55e-6d29c9d23b05-combined-ca-bundle\") pod 
\"ovn-controller-g2zr4\" (UID: \"0ab866e7-9292-4d1c-b55e-6d29c9d23b05\") " pod="openstack/ovn-controller-g2zr4" Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.635673 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-whsn4\" (UniqueName: \"kubernetes.io/projected/0ab866e7-9292-4d1c-b55e-6d29c9d23b05-kube-api-access-whsn4\") pod \"ovn-controller-g2zr4\" (UID: \"0ab866e7-9292-4d1c-b55e-6d29c9d23b05\") " pod="openstack/ovn-controller-g2zr4" Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.637775 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ab866e7-9292-4d1c-b55e-6d29c9d23b05-ovn-controller-tls-certs\") pod \"ovn-controller-g2zr4\" (UID: \"0ab866e7-9292-4d1c-b55e-6d29c9d23b05\") " pod="openstack/ovn-controller-g2zr4" Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.715035 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d9b8addc-5f49-43c0-a4c3-23ed14252765-var-run\") pod \"ovn-controller-ovs-9b4hh\" (UID: \"d9b8addc-5f49-43c0-a4c3-23ed14252765\") " pod="openstack/ovn-controller-ovs-9b4hh" Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.715243 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d9b8addc-5f49-43c0-a4c3-23ed14252765-var-run\") pod \"ovn-controller-ovs-9b4hh\" (UID: \"d9b8addc-5f49-43c0-a4c3-23ed14252765\") " pod="openstack/ovn-controller-ovs-9b4hh" Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.717620 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d9b8addc-5f49-43c0-a4c3-23ed14252765-scripts\") pod \"ovn-controller-ovs-9b4hh\" (UID: \"d9b8addc-5f49-43c0-a4c3-23ed14252765\") " pod="openstack/ovn-controller-ovs-9b4hh" Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.717704 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/d9b8addc-5f49-43c0-a4c3-23ed14252765-var-log\") pod \"ovn-controller-ovs-9b4hh\" (UID: \"d9b8addc-5f49-43c0-a4c3-23ed14252765\") " pod="openstack/ovn-controller-ovs-9b4hh" Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.717765 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/d9b8addc-5f49-43c0-a4c3-23ed14252765-etc-ovs\") pod \"ovn-controller-ovs-9b4hh\" (UID: \"d9b8addc-5f49-43c0-a4c3-23ed14252765\") " pod="openstack/ovn-controller-ovs-9b4hh" Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.717804 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/d9b8addc-5f49-43c0-a4c3-23ed14252765-var-lib\") pod \"ovn-controller-ovs-9b4hh\" (UID: \"d9b8addc-5f49-43c0-a4c3-23ed14252765\") " pod="openstack/ovn-controller-ovs-9b4hh" Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.717841 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-58b26\" (UniqueName: \"kubernetes.io/projected/d9b8addc-5f49-43c0-a4c3-23ed14252765-kube-api-access-58b26\") pod \"ovn-controller-ovs-9b4hh\" (UID: \"d9b8addc-5f49-43c0-a4c3-23ed14252765\") " pod="openstack/ovn-controller-ovs-9b4hh" Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.718324 4791 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/d9b8addc-5f49-43c0-a4c3-23ed14252765-var-log\") pod \"ovn-controller-ovs-9b4hh\" (UID: \"d9b8addc-5f49-43c0-a4c3-23ed14252765\") " pod="openstack/ovn-controller-ovs-9b4hh" Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.720741 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d9b8addc-5f49-43c0-a4c3-23ed14252765-scripts\") pod \"ovn-controller-ovs-9b4hh\" (UID: \"d9b8addc-5f49-43c0-a4c3-23ed14252765\") " pod="openstack/ovn-controller-ovs-9b4hh" Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.720927 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/d9b8addc-5f49-43c0-a4c3-23ed14252765-etc-ovs\") pod \"ovn-controller-ovs-9b4hh\" (UID: \"d9b8addc-5f49-43c0-a4c3-23ed14252765\") " pod="openstack/ovn-controller-ovs-9b4hh" Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.721535 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/d9b8addc-5f49-43c0-a4c3-23ed14252765-var-lib\") pod \"ovn-controller-ovs-9b4hh\" (UID: \"d9b8addc-5f49-43c0-a4c3-23ed14252765\") " pod="openstack/ovn-controller-ovs-9b4hh" Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.763117 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-58b26\" (UniqueName: \"kubernetes.io/projected/d9b8addc-5f49-43c0-a4c3-23ed14252765-kube-api-access-58b26\") pod \"ovn-controller-ovs-9b4hh\" (UID: \"d9b8addc-5f49-43c0-a4c3-23ed14252765\") " pod="openstack/ovn-controller-ovs-9b4hh" Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.794255 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-g2zr4" Dec 08 21:40:49 crc kubenswrapper[4791]: I1208 21:40:49.846364 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-9b4hh" Dec 08 21:40:52 crc kubenswrapper[4791]: I1208 21:40:52.510934 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 08 21:40:52 crc kubenswrapper[4791]: I1208 21:40:52.517836 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 08 21:40:52 crc kubenswrapper[4791]: I1208 21:40:52.522252 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Dec 08 21:40:52 crc kubenswrapper[4791]: I1208 21:40:52.522726 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Dec 08 21:40:52 crc kubenswrapper[4791]: I1208 21:40:52.522958 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-fbnlc" Dec 08 21:40:52 crc kubenswrapper[4791]: I1208 21:40:52.523327 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Dec 08 21:40:52 crc kubenswrapper[4791]: I1208 21:40:52.523511 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Dec 08 21:40:52 crc kubenswrapper[4791]: I1208 21:40:52.542133 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 08 21:40:52 crc kubenswrapper[4791]: I1208 21:40:52.593964 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e94275f5-3fd4-409e-9496-431d35e9b1a5-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"e94275f5-3fd4-409e-9496-431d35e9b1a5\") " pod="openstack/ovsdbserver-nb-0" Dec 08 21:40:52 crc kubenswrapper[4791]: I1208 21:40:52.594059 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e94275f5-3fd4-409e-9496-431d35e9b1a5-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"e94275f5-3fd4-409e-9496-431d35e9b1a5\") " pod="openstack/ovsdbserver-nb-0" Dec 08 21:40:52 crc kubenswrapper[4791]: I1208 21:40:52.594216 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e94275f5-3fd4-409e-9496-431d35e9b1a5-config\") pod \"ovsdbserver-nb-0\" (UID: \"e94275f5-3fd4-409e-9496-431d35e9b1a5\") " pod="openstack/ovsdbserver-nb-0" Dec 08 21:40:52 crc kubenswrapper[4791]: I1208 21:40:52.594308 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e94275f5-3fd4-409e-9496-431d35e9b1a5-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"e94275f5-3fd4-409e-9496-431d35e9b1a5\") " pod="openstack/ovsdbserver-nb-0" Dec 08 21:40:52 crc kubenswrapper[4791]: I1208 21:40:52.594379 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e94275f5-3fd4-409e-9496-431d35e9b1a5-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"e94275f5-3fd4-409e-9496-431d35e9b1a5\") " pod="openstack/ovsdbserver-nb-0" Dec 08 21:40:52 crc kubenswrapper[4791]: I1208 21:40:52.594510 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e94275f5-3fd4-409e-9496-431d35e9b1a5-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"e94275f5-3fd4-409e-9496-431d35e9b1a5\") " pod="openstack/ovsdbserver-nb-0" Dec 08 21:40:52 crc kubenswrapper[4791]: I1208 21:40:52.594653 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"pvc-4dc0a317-6aec-4741-b92e-2172548adb1a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4dc0a317-6aec-4741-b92e-2172548adb1a\") pod \"ovsdbserver-nb-0\" (UID: \"e94275f5-3fd4-409e-9496-431d35e9b1a5\") " pod="openstack/ovsdbserver-nb-0" Dec 08 21:40:52 crc kubenswrapper[4791]: I1208 21:40:52.594763 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rm9fw\" (UniqueName: \"kubernetes.io/projected/e94275f5-3fd4-409e-9496-431d35e9b1a5-kube-api-access-rm9fw\") pod \"ovsdbserver-nb-0\" (UID: \"e94275f5-3fd4-409e-9496-431d35e9b1a5\") " pod="openstack/ovsdbserver-nb-0" Dec 08 21:40:52 crc kubenswrapper[4791]: I1208 21:40:52.696384 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rm9fw\" (UniqueName: \"kubernetes.io/projected/e94275f5-3fd4-409e-9496-431d35e9b1a5-kube-api-access-rm9fw\") pod \"ovsdbserver-nb-0\" (UID: \"e94275f5-3fd4-409e-9496-431d35e9b1a5\") " pod="openstack/ovsdbserver-nb-0" Dec 08 21:40:52 crc kubenswrapper[4791]: I1208 21:40:52.697384 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e94275f5-3fd4-409e-9496-431d35e9b1a5-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"e94275f5-3fd4-409e-9496-431d35e9b1a5\") " pod="openstack/ovsdbserver-nb-0" Dec 08 21:40:52 crc kubenswrapper[4791]: I1208 21:40:52.698317 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e94275f5-3fd4-409e-9496-431d35e9b1a5-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"e94275f5-3fd4-409e-9496-431d35e9b1a5\") " pod="openstack/ovsdbserver-nb-0" Dec 08 21:40:52 crc kubenswrapper[4791]: I1208 21:40:52.698372 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e94275f5-3fd4-409e-9496-431d35e9b1a5-config\") pod \"ovsdbserver-nb-0\" (UID: \"e94275f5-3fd4-409e-9496-431d35e9b1a5\") " pod="openstack/ovsdbserver-nb-0" Dec 08 21:40:52 crc kubenswrapper[4791]: I1208 21:40:52.698405 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e94275f5-3fd4-409e-9496-431d35e9b1a5-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"e94275f5-3fd4-409e-9496-431d35e9b1a5\") " pod="openstack/ovsdbserver-nb-0" Dec 08 21:40:52 crc kubenswrapper[4791]: I1208 21:40:52.698462 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e94275f5-3fd4-409e-9496-431d35e9b1a5-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"e94275f5-3fd4-409e-9496-431d35e9b1a5\") " pod="openstack/ovsdbserver-nb-0" Dec 08 21:40:52 crc kubenswrapper[4791]: I1208 21:40:52.698951 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e94275f5-3fd4-409e-9496-431d35e9b1a5-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"e94275f5-3fd4-409e-9496-431d35e9b1a5\") " pod="openstack/ovsdbserver-nb-0" Dec 08 21:40:52 crc kubenswrapper[4791]: I1208 21:40:52.699027 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-4dc0a317-6aec-4741-b92e-2172548adb1a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4dc0a317-6aec-4741-b92e-2172548adb1a\") pod 
\"ovsdbserver-nb-0\" (UID: \"e94275f5-3fd4-409e-9496-431d35e9b1a5\") " pod="openstack/ovsdbserver-nb-0" Dec 08 21:40:52 crc kubenswrapper[4791]: I1208 21:40:52.699102 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e94275f5-3fd4-409e-9496-431d35e9b1a5-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"e94275f5-3fd4-409e-9496-431d35e9b1a5\") " pod="openstack/ovsdbserver-nb-0" Dec 08 21:40:52 crc kubenswrapper[4791]: I1208 21:40:52.699973 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e94275f5-3fd4-409e-9496-431d35e9b1a5-config\") pod \"ovsdbserver-nb-0\" (UID: \"e94275f5-3fd4-409e-9496-431d35e9b1a5\") " pod="openstack/ovsdbserver-nb-0" Dec 08 21:40:52 crc kubenswrapper[4791]: I1208 21:40:52.700343 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e94275f5-3fd4-409e-9496-431d35e9b1a5-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"e94275f5-3fd4-409e-9496-431d35e9b1a5\") " pod="openstack/ovsdbserver-nb-0" Dec 08 21:40:52 crc kubenswrapper[4791]: I1208 21:40:52.701998 4791 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 08 21:40:52 crc kubenswrapper[4791]: I1208 21:40:52.702042 4791 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-4dc0a317-6aec-4741-b92e-2172548adb1a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4dc0a317-6aec-4741-b92e-2172548adb1a\") pod \"ovsdbserver-nb-0\" (UID: \"e94275f5-3fd4-409e-9496-431d35e9b1a5\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/9cba238cffa1b5b0d898fd3e89dbede0e2c1ad9420aac9296952d9b5532318ad/globalmount\"" pod="openstack/ovsdbserver-nb-0" Dec 08 21:40:52 crc kubenswrapper[4791]: I1208 21:40:52.703570 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e94275f5-3fd4-409e-9496-431d35e9b1a5-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"e94275f5-3fd4-409e-9496-431d35e9b1a5\") " pod="openstack/ovsdbserver-nb-0" Dec 08 21:40:52 crc kubenswrapper[4791]: I1208 21:40:52.703806 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e94275f5-3fd4-409e-9496-431d35e9b1a5-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"e94275f5-3fd4-409e-9496-431d35e9b1a5\") " pod="openstack/ovsdbserver-nb-0" Dec 08 21:40:52 crc kubenswrapper[4791]: I1208 21:40:52.715177 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rm9fw\" (UniqueName: \"kubernetes.io/projected/e94275f5-3fd4-409e-9496-431d35e9b1a5-kube-api-access-rm9fw\") pod \"ovsdbserver-nb-0\" (UID: \"e94275f5-3fd4-409e-9496-431d35e9b1a5\") " pod="openstack/ovsdbserver-nb-0" Dec 08 21:40:52 crc kubenswrapper[4791]: I1208 21:40:52.715191 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e94275f5-3fd4-409e-9496-431d35e9b1a5-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"e94275f5-3fd4-409e-9496-431d35e9b1a5\") " pod="openstack/ovsdbserver-nb-0" Dec 08 21:40:52 crc kubenswrapper[4791]: I1208 21:40:52.748975 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"pvc-4dc0a317-6aec-4741-b92e-2172548adb1a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4dc0a317-6aec-4741-b92e-2172548adb1a\") pod \"ovsdbserver-nb-0\" (UID: \"e94275f5-3fd4-409e-9496-431d35e9b1a5\") " pod="openstack/ovsdbserver-nb-0" Dec 08 21:40:52 crc kubenswrapper[4791]: I1208 21:40:52.855489 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 08 21:40:53 crc kubenswrapper[4791]: I1208 21:40:53.455660 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 08 21:40:53 crc kubenswrapper[4791]: I1208 21:40:53.457589 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 08 21:40:53 crc kubenswrapper[4791]: I1208 21:40:53.460567 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Dec 08 21:40:53 crc kubenswrapper[4791]: I1208 21:40:53.461499 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Dec 08 21:40:53 crc kubenswrapper[4791]: I1208 21:40:53.461786 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Dec 08 21:40:53 crc kubenswrapper[4791]: I1208 21:40:53.462651 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-bzhsn" Dec 08 21:40:53 crc kubenswrapper[4791]: I1208 21:40:53.468406 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 08 21:40:53 crc kubenswrapper[4791]: I1208 21:40:53.514679 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cfef56eb-b1e1-48cc-9b1a-c92587748a8d-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"cfef56eb-b1e1-48cc-9b1a-c92587748a8d\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:40:53 crc kubenswrapper[4791]: I1208 21:40:53.514738 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfef56eb-b1e1-48cc-9b1a-c92587748a8d-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"cfef56eb-b1e1-48cc-9b1a-c92587748a8d\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:40:53 crc kubenswrapper[4791]: I1208 21:40:53.514791 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfef56eb-b1e1-48cc-9b1a-c92587748a8d-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"cfef56eb-b1e1-48cc-9b1a-c92587748a8d\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:40:53 crc kubenswrapper[4791]: I1208 21:40:53.514837 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cfef56eb-b1e1-48cc-9b1a-c92587748a8d-config\") pod \"ovsdbserver-sb-0\" (UID: \"cfef56eb-b1e1-48cc-9b1a-c92587748a8d\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:40:53 crc kubenswrapper[4791]: I1208 21:40:53.514866 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-3c915644-77af-432e-a7fe-347c3870382a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3c915644-77af-432e-a7fe-347c3870382a\") pod \"ovsdbserver-sb-0\" (UID: \"cfef56eb-b1e1-48cc-9b1a-c92587748a8d\") " 
pod="openstack/ovsdbserver-sb-0" Dec 08 21:40:53 crc kubenswrapper[4791]: I1208 21:40:53.514887 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/cfef56eb-b1e1-48cc-9b1a-c92587748a8d-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"cfef56eb-b1e1-48cc-9b1a-c92587748a8d\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:40:53 crc kubenswrapper[4791]: I1208 21:40:53.514978 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m4ct9\" (UniqueName: \"kubernetes.io/projected/cfef56eb-b1e1-48cc-9b1a-c92587748a8d-kube-api-access-m4ct9\") pod \"ovsdbserver-sb-0\" (UID: \"cfef56eb-b1e1-48cc-9b1a-c92587748a8d\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:40:53 crc kubenswrapper[4791]: I1208 21:40:53.515055 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfef56eb-b1e1-48cc-9b1a-c92587748a8d-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"cfef56eb-b1e1-48cc-9b1a-c92587748a8d\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:40:53 crc kubenswrapper[4791]: I1208 21:40:53.616661 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfef56eb-b1e1-48cc-9b1a-c92587748a8d-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"cfef56eb-b1e1-48cc-9b1a-c92587748a8d\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:40:53 crc kubenswrapper[4791]: I1208 21:40:53.616743 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cfef56eb-b1e1-48cc-9b1a-c92587748a8d-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"cfef56eb-b1e1-48cc-9b1a-c92587748a8d\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:40:53 crc kubenswrapper[4791]: I1208 21:40:53.616767 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfef56eb-b1e1-48cc-9b1a-c92587748a8d-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"cfef56eb-b1e1-48cc-9b1a-c92587748a8d\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:40:53 crc kubenswrapper[4791]: I1208 21:40:53.616802 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfef56eb-b1e1-48cc-9b1a-c92587748a8d-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"cfef56eb-b1e1-48cc-9b1a-c92587748a8d\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:40:53 crc kubenswrapper[4791]: I1208 21:40:53.616840 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cfef56eb-b1e1-48cc-9b1a-c92587748a8d-config\") pod \"ovsdbserver-sb-0\" (UID: \"cfef56eb-b1e1-48cc-9b1a-c92587748a8d\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:40:53 crc kubenswrapper[4791]: I1208 21:40:53.616871 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-3c915644-77af-432e-a7fe-347c3870382a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3c915644-77af-432e-a7fe-347c3870382a\") pod \"ovsdbserver-sb-0\" (UID: \"cfef56eb-b1e1-48cc-9b1a-c92587748a8d\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:40:53 crc kubenswrapper[4791]: I1208 21:40:53.616892 4791 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/cfef56eb-b1e1-48cc-9b1a-c92587748a8d-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"cfef56eb-b1e1-48cc-9b1a-c92587748a8d\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:40:53 crc kubenswrapper[4791]: I1208 21:40:53.616932 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m4ct9\" (UniqueName: \"kubernetes.io/projected/cfef56eb-b1e1-48cc-9b1a-c92587748a8d-kube-api-access-m4ct9\") pod \"ovsdbserver-sb-0\" (UID: \"cfef56eb-b1e1-48cc-9b1a-c92587748a8d\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:40:53 crc kubenswrapper[4791]: I1208 21:40:53.620309 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cfef56eb-b1e1-48cc-9b1a-c92587748a8d-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"cfef56eb-b1e1-48cc-9b1a-c92587748a8d\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:40:53 crc kubenswrapper[4791]: I1208 21:40:53.621923 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfef56eb-b1e1-48cc-9b1a-c92587748a8d-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"cfef56eb-b1e1-48cc-9b1a-c92587748a8d\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:40:53 crc kubenswrapper[4791]: I1208 21:40:53.622217 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/cfef56eb-b1e1-48cc-9b1a-c92587748a8d-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"cfef56eb-b1e1-48cc-9b1a-c92587748a8d\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:40:53 crc kubenswrapper[4791]: I1208 21:40:53.622974 4791 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 08 21:40:53 crc kubenswrapper[4791]: I1208 21:40:53.623011 4791 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-3c915644-77af-432e-a7fe-347c3870382a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3c915644-77af-432e-a7fe-347c3870382a\") pod \"ovsdbserver-sb-0\" (UID: \"cfef56eb-b1e1-48cc-9b1a-c92587748a8d\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/f2b79b17b18e1c29e65662ddb17971bc2c88f1b2bf1da44be1bafdf4e324a6fc/globalmount\"" pod="openstack/ovsdbserver-sb-0" Dec 08 21:40:53 crc kubenswrapper[4791]: I1208 21:40:53.624858 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfef56eb-b1e1-48cc-9b1a-c92587748a8d-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"cfef56eb-b1e1-48cc-9b1a-c92587748a8d\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:40:53 crc kubenswrapper[4791]: I1208 21:40:53.630344 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfef56eb-b1e1-48cc-9b1a-c92587748a8d-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"cfef56eb-b1e1-48cc-9b1a-c92587748a8d\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:40:53 crc kubenswrapper[4791]: I1208 21:40:53.630452 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cfef56eb-b1e1-48cc-9b1a-c92587748a8d-config\") pod \"ovsdbserver-sb-0\" (UID: \"cfef56eb-b1e1-48cc-9b1a-c92587748a8d\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:40:53 crc kubenswrapper[4791]: I1208 21:40:53.642762 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m4ct9\" (UniqueName: \"kubernetes.io/projected/cfef56eb-b1e1-48cc-9b1a-c92587748a8d-kube-api-access-m4ct9\") pod \"ovsdbserver-sb-0\" (UID: \"cfef56eb-b1e1-48cc-9b1a-c92587748a8d\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:40:53 crc kubenswrapper[4791]: I1208 21:40:53.672312 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-3c915644-77af-432e-a7fe-347c3870382a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3c915644-77af-432e-a7fe-347c3870382a\") pod \"ovsdbserver-sb-0\" (UID: \"cfef56eb-b1e1-48cc-9b1a-c92587748a8d\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:40:53 crc kubenswrapper[4791]: I1208 21:40:53.782312 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 08 21:41:03 crc kubenswrapper[4791]: E1208 21:41:03.181854 4791 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Dec 08 21:41:03 crc kubenswrapper[4791]: E1208 21:41:03.182502 4791 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2gmtc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cell1-server-0_openstack(d9cd6ba2-6502-43cf-8e48-36570ea8e831): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 08 21:41:03 crc kubenswrapper[4791]: E1208 21:41:03.184559 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" 
pod="openstack/rabbitmq-cell1-server-0" podUID="d9cd6ba2-6502-43cf-8e48-36570ea8e831" Dec 08 21:41:03 crc kubenswrapper[4791]: E1208 21:41:03.217217 4791 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Dec 08 21:41:03 crc kubenswrapper[4791]: E1208 21:41:03.217375 4791 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2fxp2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-server-0_openstack(45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 08 21:41:03 crc kubenswrapper[4791]: E1208 21:41:03.218558 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" 
pod="openstack/rabbitmq-server-0" podUID="45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9" Dec 08 21:41:03 crc kubenswrapper[4791]: E1208 21:41:03.585444 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="openstack/rabbitmq-server-0" podUID="45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9" Dec 08 21:41:03 crc kubenswrapper[4791]: E1208 21:41:03.586410 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="openstack/rabbitmq-cell1-server-0" podUID="d9cd6ba2-6502-43cf-8e48-36570ea8e831" Dec 08 21:41:05 crc kubenswrapper[4791]: E1208 21:41:05.119925 4791 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Dec 08 21:41:05 crc kubenswrapper[4791]: E1208 21:41:05.120113 4791 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-47r7k,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-server-1_openstack(6101a045-4b01-484e-a65b-4c406e458ea1): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 08 21:41:05 crc kubenswrapper[4791]: E1208 21:41:05.121277 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-server-1" podUID="6101a045-4b01-484e-a65b-4c406e458ea1" Dec 08 21:41:05 crc kubenswrapper[4791]: I1208 21:41:05.251351 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:41:05 crc kubenswrapper[4791]: I1208 21:41:05.251694 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:41:05 crc kubenswrapper[4791]: E1208 21:41:05.603308 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="openstack/rabbitmq-server-1" podUID="6101a045-4b01-484e-a65b-4c406e458ea1" Dec 08 21:41:10 crc kubenswrapper[4791]: I1208 21:41:10.552851 4791 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openshift-marketplace/redhat-operators-trs25"] Dec 08 21:41:10 crc kubenswrapper[4791]: I1208 21:41:10.556134 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-trs25" Dec 08 21:41:10 crc kubenswrapper[4791]: I1208 21:41:10.569358 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-trs25"] Dec 08 21:41:10 crc kubenswrapper[4791]: I1208 21:41:10.711342 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/95730417-3c85-44a0-9a95-8fefeb495a03-catalog-content\") pod \"redhat-operators-trs25\" (UID: \"95730417-3c85-44a0-9a95-8fefeb495a03\") " pod="openshift-marketplace/redhat-operators-trs25" Dec 08 21:41:10 crc kubenswrapper[4791]: I1208 21:41:10.711553 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/95730417-3c85-44a0-9a95-8fefeb495a03-utilities\") pod \"redhat-operators-trs25\" (UID: \"95730417-3c85-44a0-9a95-8fefeb495a03\") " pod="openshift-marketplace/redhat-operators-trs25" Dec 08 21:41:10 crc kubenswrapper[4791]: I1208 21:41:10.711892 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4hx2n\" (UniqueName: \"kubernetes.io/projected/95730417-3c85-44a0-9a95-8fefeb495a03-kube-api-access-4hx2n\") pod \"redhat-operators-trs25\" (UID: \"95730417-3c85-44a0-9a95-8fefeb495a03\") " pod="openshift-marketplace/redhat-operators-trs25" Dec 08 21:41:10 crc kubenswrapper[4791]: I1208 21:41:10.813861 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/95730417-3c85-44a0-9a95-8fefeb495a03-utilities\") pod \"redhat-operators-trs25\" (UID: \"95730417-3c85-44a0-9a95-8fefeb495a03\") " pod="openshift-marketplace/redhat-operators-trs25" Dec 08 21:41:10 crc kubenswrapper[4791]: I1208 21:41:10.813989 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4hx2n\" (UniqueName: \"kubernetes.io/projected/95730417-3c85-44a0-9a95-8fefeb495a03-kube-api-access-4hx2n\") pod \"redhat-operators-trs25\" (UID: \"95730417-3c85-44a0-9a95-8fefeb495a03\") " pod="openshift-marketplace/redhat-operators-trs25" Dec 08 21:41:10 crc kubenswrapper[4791]: I1208 21:41:10.814161 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/95730417-3c85-44a0-9a95-8fefeb495a03-catalog-content\") pod \"redhat-operators-trs25\" (UID: \"95730417-3c85-44a0-9a95-8fefeb495a03\") " pod="openshift-marketplace/redhat-operators-trs25" Dec 08 21:41:10 crc kubenswrapper[4791]: I1208 21:41:10.814428 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/95730417-3c85-44a0-9a95-8fefeb495a03-utilities\") pod \"redhat-operators-trs25\" (UID: \"95730417-3c85-44a0-9a95-8fefeb495a03\") " pod="openshift-marketplace/redhat-operators-trs25" Dec 08 21:41:10 crc kubenswrapper[4791]: I1208 21:41:10.814699 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/95730417-3c85-44a0-9a95-8fefeb495a03-catalog-content\") pod \"redhat-operators-trs25\" (UID: \"95730417-3c85-44a0-9a95-8fefeb495a03\") " 
pod="openshift-marketplace/redhat-operators-trs25" Dec 08 21:41:10 crc kubenswrapper[4791]: I1208 21:41:10.833620 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4hx2n\" (UniqueName: \"kubernetes.io/projected/95730417-3c85-44a0-9a95-8fefeb495a03-kube-api-access-4hx2n\") pod \"redhat-operators-trs25\" (UID: \"95730417-3c85-44a0-9a95-8fefeb495a03\") " pod="openshift-marketplace/redhat-operators-trs25" Dec 08 21:41:10 crc kubenswrapper[4791]: I1208 21:41:10.885736 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-trs25" Dec 08 21:41:14 crc kubenswrapper[4791]: E1208 21:41:14.283924 4791 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 08 21:41:14 crc kubenswrapper[4791]: E1208 21:41:14.284626 4791 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kf82z,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-57d769cc4f-56ffw_openstack(07d0660f-c281-4624-9f12-f524fe2a8092): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 08 21:41:14 crc kubenswrapper[4791]: E1208 21:41:14.289140 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying 
config: context canceled\"" pod="openstack/dnsmasq-dns-57d769cc4f-56ffw" podUID="07d0660f-c281-4624-9f12-f524fe2a8092" Dec 08 21:41:14 crc kubenswrapper[4791]: E1208 21:41:14.310040 4791 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 08 21:41:14 crc kubenswrapper[4791]: E1208 21:41:14.310217 4791 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9bht6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-lkjjh_openstack(64298554-5f77-48ff-ace7-8a85487060ba): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 08 21:41:14 crc kubenswrapper[4791]: E1208 21:41:14.311363 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-lkjjh" podUID="64298554-5f77-48ff-ace7-8a85487060ba" Dec 08 21:41:14 crc kubenswrapper[4791]: E1208 21:41:14.326567 4791 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 08 21:41:14 crc kubenswrapper[4791]: E1208 21:41:14.327002 4791 kuberuntime_manager.go:1274] "Unhandled Error" 
err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mjrgb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-lpdvq_openstack(4e83c5d1-7391-49ef-bc37-decc7363bddb): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 08 21:41:14 crc kubenswrapper[4791]: E1208 21:41:14.328149 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-lpdvq" podUID="4e83c5d1-7391-49ef-bc37-decc7363bddb" Dec 08 21:41:14 crc kubenswrapper[4791]: E1208 21:41:14.509169 4791 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 08 21:41:14 crc kubenswrapper[4791]: E1208 21:41:14.509369 4791 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rlt27,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-666b6646f7-f7kxb_openstack(da534a4d-ccf5-4297-9a1e-5f3b2ad136f5): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 08 21:41:14 crc kubenswrapper[4791]: E1208 21:41:14.510654 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-666b6646f7-f7kxb" podUID="da534a4d-ccf5-4297-9a1e-5f3b2ad136f5" Dec 08 21:41:14 crc kubenswrapper[4791]: E1208 21:41:14.677176 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-666b6646f7-f7kxb" podUID="da534a4d-ccf5-4297-9a1e-5f3b2ad136f5" Dec 08 21:41:14 crc kubenswrapper[4791]: E1208 21:41:14.677204 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-57d769cc4f-56ffw" podUID="07d0660f-c281-4624-9f12-f524fe2a8092" Dec 08 21:41:15 crc kubenswrapper[4791]: I1208 21:41:15.127324 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-trs25"] Dec 08 21:41:15 crc kubenswrapper[4791]: I1208 21:41:15.150621 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-g2zr4"] Dec 08 21:41:15 crc kubenswrapper[4791]: I1208 21:41:15.536905 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 08 21:41:15 crc kubenswrapper[4791]: W1208 
21:41:15.545675 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcfef56eb_b1e1_48cc_9b1a_c92587748a8d.slice/crio-5c1722476cdc516342337181e9a5c0f00b92f8e2395a7642e12786521f603c76 WatchSource:0}: Error finding container 5c1722476cdc516342337181e9a5c0f00b92f8e2395a7642e12786521f603c76: Status 404 returned error can't find the container with id 5c1722476cdc516342337181e9a5c0f00b92f8e2395a7642e12786521f603c76 Dec 08 21:41:15 crc kubenswrapper[4791]: I1208 21:41:15.690930 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"cfef56eb-b1e1-48cc-9b1a-c92587748a8d","Type":"ContainerStarted","Data":"5c1722476cdc516342337181e9a5c0f00b92f8e2395a7642e12786521f603c76"} Dec 08 21:41:15 crc kubenswrapper[4791]: I1208 21:41:15.695782 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-g2zr4" event={"ID":"0ab866e7-9292-4d1c-b55e-6d29c9d23b05","Type":"ContainerStarted","Data":"7c6b048742f82df4d76d3034696f082c65eaf31660a3aaeeb361f2de0aee5ba4"} Dec 08 21:41:15 crc kubenswrapper[4791]: I1208 21:41:15.704333 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"d1c662fa-6e9f-4127-af2f-059adea86bd4","Type":"ContainerStarted","Data":"7e9e4bc886a8b318065f402cabaa69a124c1ca911ec943f1f722122c4c19a93c"} Dec 08 21:41:15 crc kubenswrapper[4791]: I1208 21:41:15.704688 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Dec 08 21:41:15 crc kubenswrapper[4791]: I1208 21:41:15.707423 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-lpdvq" event={"ID":"4e83c5d1-7391-49ef-bc37-decc7363bddb","Type":"ContainerDied","Data":"4a760fed59a98d5de29f46c9a6a39949dc047293cea3a7f44ce8ba8ee58d593e"} Dec 08 21:41:15 crc kubenswrapper[4791]: I1208 21:41:15.707460 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4a760fed59a98d5de29f46c9a6a39949dc047293cea3a7f44ce8ba8ee58d593e" Dec 08 21:41:15 crc kubenswrapper[4791]: I1208 21:41:15.709449 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-lkjjh" event={"ID":"64298554-5f77-48ff-ace7-8a85487060ba","Type":"ContainerDied","Data":"0044a6ffddcb7b5651e60338c432e206d1bf4a6a5e5b1094e8e46360ac43e3e9"} Dec 08 21:41:15 crc kubenswrapper[4791]: I1208 21:41:15.709474 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0044a6ffddcb7b5651e60338c432e206d1bf4a6a5e5b1094e8e46360ac43e3e9" Dec 08 21:41:15 crc kubenswrapper[4791]: I1208 21:41:15.710777 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-trs25" event={"ID":"95730417-3c85-44a0-9a95-8fefeb495a03","Type":"ContainerStarted","Data":"4aedcc7888b61bc586442f0555b29dd0bdaedd14c49cf4c8fe64784f386468b0"} Dec 08 21:41:15 crc kubenswrapper[4791]: I1208 21:41:15.712030 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"0eeeba8c-9e5c-4701-9941-8a324604a18b","Type":"ContainerStarted","Data":"2d4082603887253139ffc0b3ba0d35bbad279c14502b38d97dfd456b7436a860"} Dec 08 21:41:15 crc kubenswrapper[4791]: I1208 21:41:15.713255 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" 
event={"ID":"8e46f211-8213-4219-9389-044888a87181","Type":"ContainerStarted","Data":"7fb529fee36f774952ce6410ee3724778847bbcc91217f48e7348df9b1dd4ede"} Dec 08 21:41:15 crc kubenswrapper[4791]: I1208 21:41:15.737084 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=3.642458128 podStartE2EDuration="31.73706236s" podCreationTimestamp="2025-12-08 21:40:44 +0000 UTC" firstStartedPulling="2025-12-08 21:40:46.090275953 +0000 UTC m=+1322.789034298" lastFinishedPulling="2025-12-08 21:41:14.184880185 +0000 UTC m=+1350.883638530" observedRunningTime="2025-12-08 21:41:15.721991186 +0000 UTC m=+1352.420749541" watchObservedRunningTime="2025-12-08 21:41:15.73706236 +0000 UTC m=+1352.435820705" Dec 08 21:41:16 crc kubenswrapper[4791]: I1208 21:41:16.222373 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-9b4hh"] Dec 08 21:41:16 crc kubenswrapper[4791]: W1208 21:41:16.224848 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd9b8addc_5f49_43c0_a4c3_23ed14252765.slice/crio-9d3c609f6a60ddc8493a64719ff00f97ab3bb6ea755d113a96d8cb5320beeaee WatchSource:0}: Error finding container 9d3c609f6a60ddc8493a64719ff00f97ab3bb6ea755d113a96d8cb5320beeaee: Status 404 returned error can't find the container with id 9d3c609f6a60ddc8493a64719ff00f97ab3bb6ea755d113a96d8cb5320beeaee Dec 08 21:41:16 crc kubenswrapper[4791]: I1208 21:41:16.402673 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-lpdvq" Dec 08 21:41:16 crc kubenswrapper[4791]: I1208 21:41:16.413989 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-lkjjh" Dec 08 21:41:16 crc kubenswrapper[4791]: I1208 21:41:16.551986 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e83c5d1-7391-49ef-bc37-decc7363bddb-config\") pod \"4e83c5d1-7391-49ef-bc37-decc7363bddb\" (UID: \"4e83c5d1-7391-49ef-bc37-decc7363bddb\") " Dec 08 21:41:16 crc kubenswrapper[4791]: I1208 21:41:16.552502 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/64298554-5f77-48ff-ace7-8a85487060ba-dns-svc\") pod \"64298554-5f77-48ff-ace7-8a85487060ba\" (UID: \"64298554-5f77-48ff-ace7-8a85487060ba\") " Dec 08 21:41:16 crc kubenswrapper[4791]: I1208 21:41:16.552576 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9bht6\" (UniqueName: \"kubernetes.io/projected/64298554-5f77-48ff-ace7-8a85487060ba-kube-api-access-9bht6\") pod \"64298554-5f77-48ff-ace7-8a85487060ba\" (UID: \"64298554-5f77-48ff-ace7-8a85487060ba\") " Dec 08 21:41:16 crc kubenswrapper[4791]: I1208 21:41:16.552628 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mjrgb\" (UniqueName: \"kubernetes.io/projected/4e83c5d1-7391-49ef-bc37-decc7363bddb-kube-api-access-mjrgb\") pod \"4e83c5d1-7391-49ef-bc37-decc7363bddb\" (UID: \"4e83c5d1-7391-49ef-bc37-decc7363bddb\") " Dec 08 21:41:16 crc kubenswrapper[4791]: I1208 21:41:16.552680 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64298554-5f77-48ff-ace7-8a85487060ba-config\") pod \"64298554-5f77-48ff-ace7-8a85487060ba\" (UID: 
\"64298554-5f77-48ff-ace7-8a85487060ba\") " Dec 08 21:41:16 crc kubenswrapper[4791]: I1208 21:41:16.553412 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4e83c5d1-7391-49ef-bc37-decc7363bddb-config" (OuterVolumeSpecName: "config") pod "4e83c5d1-7391-49ef-bc37-decc7363bddb" (UID: "4e83c5d1-7391-49ef-bc37-decc7363bddb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:41:16 crc kubenswrapper[4791]: I1208 21:41:16.553789 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64298554-5f77-48ff-ace7-8a85487060ba-config" (OuterVolumeSpecName: "config") pod "64298554-5f77-48ff-ace7-8a85487060ba" (UID: "64298554-5f77-48ff-ace7-8a85487060ba"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:41:16 crc kubenswrapper[4791]: I1208 21:41:16.554190 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64298554-5f77-48ff-ace7-8a85487060ba-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "64298554-5f77-48ff-ace7-8a85487060ba" (UID: "64298554-5f77-48ff-ace7-8a85487060ba"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:41:16 crc kubenswrapper[4791]: I1208 21:41:16.561654 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e83c5d1-7391-49ef-bc37-decc7363bddb-kube-api-access-mjrgb" (OuterVolumeSpecName: "kube-api-access-mjrgb") pod "4e83c5d1-7391-49ef-bc37-decc7363bddb" (UID: "4e83c5d1-7391-49ef-bc37-decc7363bddb"). InnerVolumeSpecName "kube-api-access-mjrgb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:41:16 crc kubenswrapper[4791]: I1208 21:41:16.564937 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64298554-5f77-48ff-ace7-8a85487060ba-kube-api-access-9bht6" (OuterVolumeSpecName: "kube-api-access-9bht6") pod "64298554-5f77-48ff-ace7-8a85487060ba" (UID: "64298554-5f77-48ff-ace7-8a85487060ba"). InnerVolumeSpecName "kube-api-access-9bht6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:41:16 crc kubenswrapper[4791]: I1208 21:41:16.600672 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 08 21:41:16 crc kubenswrapper[4791]: W1208 21:41:16.608766 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode94275f5_3fd4_409e_9496_431d35e9b1a5.slice/crio-b4b211c0e2e696468643bdb583c5def3252e1206ab09f18be6750d70e49d29b8 WatchSource:0}: Error finding container b4b211c0e2e696468643bdb583c5def3252e1206ab09f18be6750d70e49d29b8: Status 404 returned error can't find the container with id b4b211c0e2e696468643bdb583c5def3252e1206ab09f18be6750d70e49d29b8 Dec 08 21:41:16 crc kubenswrapper[4791]: I1208 21:41:16.655683 4791 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/64298554-5f77-48ff-ace7-8a85487060ba-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:16 crc kubenswrapper[4791]: I1208 21:41:16.655748 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9bht6\" (UniqueName: \"kubernetes.io/projected/64298554-5f77-48ff-ace7-8a85487060ba-kube-api-access-9bht6\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:16 crc kubenswrapper[4791]: I1208 21:41:16.655761 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mjrgb\" (UniqueName: \"kubernetes.io/projected/4e83c5d1-7391-49ef-bc37-decc7363bddb-kube-api-access-mjrgb\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:16 crc kubenswrapper[4791]: I1208 21:41:16.655769 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64298554-5f77-48ff-ace7-8a85487060ba-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:16 crc kubenswrapper[4791]: I1208 21:41:16.655779 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e83c5d1-7391-49ef-bc37-decc7363bddb-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:16 crc kubenswrapper[4791]: I1208 21:41:16.724319 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"e94275f5-3fd4-409e-9496-431d35e9b1a5","Type":"ContainerStarted","Data":"b4b211c0e2e696468643bdb583c5def3252e1206ab09f18be6750d70e49d29b8"} Dec 08 21:41:16 crc kubenswrapper[4791]: I1208 21:41:16.727277 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"d9cd6ba2-6502-43cf-8e48-36570ea8e831","Type":"ContainerStarted","Data":"6a0637a0f4a66c462da7dc957cd4c2ba75e3dc02cef51ffe6840ae5171578b30"} Dec 08 21:41:16 crc kubenswrapper[4791]: I1208 21:41:16.729367 4791 generic.go:334] "Generic (PLEG): container finished" podID="95730417-3c85-44a0-9a95-8fefeb495a03" containerID="2725de65e588a0e7fa93dc2e03cb488d8b5669484fd6cbb801990b7b02fef5a6" exitCode=0 Dec 08 21:41:16 crc kubenswrapper[4791]: I1208 21:41:16.730229 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-trs25" event={"ID":"95730417-3c85-44a0-9a95-8fefeb495a03","Type":"ContainerDied","Data":"2725de65e588a0e7fa93dc2e03cb488d8b5669484fd6cbb801990b7b02fef5a6"} Dec 08 21:41:16 crc kubenswrapper[4791]: I1208 21:41:16.732649 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-9b4hh" 
event={"ID":"d9b8addc-5f49-43c0-a4c3-23ed14252765","Type":"ContainerStarted","Data":"9d3c609f6a60ddc8493a64719ff00f97ab3bb6ea755d113a96d8cb5320beeaee"} Dec 08 21:41:16 crc kubenswrapper[4791]: I1208 21:41:16.736423 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-2" event={"ID":"87530e07-a720-4b5f-bd6f-c3f8bb540453","Type":"ContainerStarted","Data":"91ad5287af76fcf551cf30d4f96a41d4fef558f7ab29815285c08d54b59a4e7c"} Dec 08 21:41:16 crc kubenswrapper[4791]: I1208 21:41:16.736473 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-lpdvq" Dec 08 21:41:16 crc kubenswrapper[4791]: I1208 21:41:16.738993 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-lkjjh" Dec 08 21:41:16 crc kubenswrapper[4791]: I1208 21:41:16.900267 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-lkjjh"] Dec 08 21:41:16 crc kubenswrapper[4791]: I1208 21:41:16.915399 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-lkjjh"] Dec 08 21:41:16 crc kubenswrapper[4791]: I1208 21:41:16.961674 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-lpdvq"] Dec 08 21:41:16 crc kubenswrapper[4791]: I1208 21:41:16.973396 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-lpdvq"] Dec 08 21:41:17 crc kubenswrapper[4791]: I1208 21:41:17.608773 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e83c5d1-7391-49ef-bc37-decc7363bddb" path="/var/lib/kubelet/pods/4e83c5d1-7391-49ef-bc37-decc7363bddb/volumes" Dec 08 21:41:17 crc kubenswrapper[4791]: I1208 21:41:17.610099 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64298554-5f77-48ff-ace7-8a85487060ba" path="/var/lib/kubelet/pods/64298554-5f77-48ff-ace7-8a85487060ba/volumes" Dec 08 21:41:19 crc kubenswrapper[4791]: I1208 21:41:19.764671 4791 generic.go:334] "Generic (PLEG): container finished" podID="8e46f211-8213-4219-9389-044888a87181" containerID="7fb529fee36f774952ce6410ee3724778847bbcc91217f48e7348df9b1dd4ede" exitCode=0 Dec 08 21:41:19 crc kubenswrapper[4791]: I1208 21:41:19.764749 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"8e46f211-8213-4219-9389-044888a87181","Type":"ContainerDied","Data":"7fb529fee36f774952ce6410ee3724778847bbcc91217f48e7348df9b1dd4ede"} Dec 08 21:41:19 crc kubenswrapper[4791]: I1208 21:41:19.903891 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Dec 08 21:41:20 crc kubenswrapper[4791]: I1208 21:41:20.780277 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-g2zr4" event={"ID":"0ab866e7-9292-4d1c-b55e-6d29c9d23b05","Type":"ContainerStarted","Data":"c2cfb6367d14e9b7a2e19e617c4260e6f3d788b3694b1aa32081055436426fb0"} Dec 08 21:41:20 crc kubenswrapper[4791]: I1208 21:41:20.783339 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-g2zr4" Dec 08 21:41:20 crc kubenswrapper[4791]: I1208 21:41:20.787928 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-trs25" event={"ID":"95730417-3c85-44a0-9a95-8fefeb495a03","Type":"ContainerStarted","Data":"2844490bb124b2b0cb9120a218e1c1c2be5253a09ceec9a1503462767511baaf"} Dec 08 21:41:20 crc 
kubenswrapper[4791]: I1208 21:41:20.790330 4791 generic.go:334] "Generic (PLEG): container finished" podID="d9b8addc-5f49-43c0-a4c3-23ed14252765" containerID="7384778cc82a0d5064671edadf5997dbf421655ee6060e1fd79c388039c3ff2c" exitCode=0 Dec 08 21:41:20 crc kubenswrapper[4791]: I1208 21:41:20.790416 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-9b4hh" event={"ID":"d9b8addc-5f49-43c0-a4c3-23ed14252765","Type":"ContainerDied","Data":"7384778cc82a0d5064671edadf5997dbf421655ee6060e1fd79c388039c3ff2c"} Dec 08 21:41:20 crc kubenswrapper[4791]: I1208 21:41:20.792886 4791 generic.go:334] "Generic (PLEG): container finished" podID="0eeeba8c-9e5c-4701-9941-8a324604a18b" containerID="2d4082603887253139ffc0b3ba0d35bbad279c14502b38d97dfd456b7436a860" exitCode=0 Dec 08 21:41:20 crc kubenswrapper[4791]: I1208 21:41:20.793036 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"0eeeba8c-9e5c-4701-9941-8a324604a18b","Type":"ContainerDied","Data":"2d4082603887253139ffc0b3ba0d35bbad279c14502b38d97dfd456b7436a860"} Dec 08 21:41:20 crc kubenswrapper[4791]: I1208 21:41:20.799925 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"e94275f5-3fd4-409e-9496-431d35e9b1a5","Type":"ContainerStarted","Data":"7a71e1b2808318a892cb6f265f4fdc0a239d39c8f98d1d697c24acd51fb83eb9"} Dec 08 21:41:20 crc kubenswrapper[4791]: I1208 21:41:20.822211 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"8e46f211-8213-4219-9389-044888a87181","Type":"ContainerStarted","Data":"3524d40e912342af1b247999d04274b5a8bf35fac9df94cd765c942ffb5a1863"} Dec 08 21:41:20 crc kubenswrapper[4791]: I1208 21:41:20.824624 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"cfef56eb-b1e1-48cc-9b1a-c92587748a8d","Type":"ContainerStarted","Data":"0709f1e25aa796c8f15de2dfee424fa6855bf29c8a872ce503a005c04352ac31"} Dec 08 21:41:20 crc kubenswrapper[4791]: I1208 21:41:20.825457 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-g2zr4" podStartSLOduration=27.409054809 podStartE2EDuration="31.825432117s" podCreationTimestamp="2025-12-08 21:40:49 +0000 UTC" firstStartedPulling="2025-12-08 21:41:15.161787456 +0000 UTC m=+1351.860545801" lastFinishedPulling="2025-12-08 21:41:19.578164764 +0000 UTC m=+1356.276923109" observedRunningTime="2025-12-08 21:41:20.805168205 +0000 UTC m=+1357.503926560" watchObservedRunningTime="2025-12-08 21:41:20.825432117 +0000 UTC m=+1357.524190462" Dec 08 21:41:20 crc kubenswrapper[4791]: I1208 21:41:20.918229 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=9.594458293 podStartE2EDuration="37.918213688s" podCreationTimestamp="2025-12-08 21:40:43 +0000 UTC" firstStartedPulling="2025-12-08 21:40:45.860500195 +0000 UTC m=+1322.559258540" lastFinishedPulling="2025-12-08 21:41:14.18425559 +0000 UTC m=+1350.883013935" observedRunningTime="2025-12-08 21:41:20.915135552 +0000 UTC m=+1357.613893907" watchObservedRunningTime="2025-12-08 21:41:20.918213688 +0000 UTC m=+1357.616972033" Dec 08 21:41:21 crc kubenswrapper[4791]: I1208 21:41:21.840465 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" 
event={"ID":"45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9","Type":"ContainerStarted","Data":"98c8e4efd467bfd4b9f8c3e964d4dac9a711ce08771957c180d136b9be560757"} Dec 08 21:41:21 crc kubenswrapper[4791]: I1208 21:41:21.843281 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-9b4hh" event={"ID":"d9b8addc-5f49-43c0-a4c3-23ed14252765","Type":"ContainerStarted","Data":"cc68c48aa91954bd56508e5272d0140bb9b4d52c13c033195b409ea53c7e4680"} Dec 08 21:41:21 crc kubenswrapper[4791]: I1208 21:41:21.846485 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"0eeeba8c-9e5c-4701-9941-8a324604a18b","Type":"ContainerStarted","Data":"90d0d5e6ecb96649e1fe2cfefec1a347a70f71eb38f901d7e7c060c88dacb762"} Dec 08 21:41:21 crc kubenswrapper[4791]: I1208 21:41:21.851801 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-1" event={"ID":"6101a045-4b01-484e-a65b-4c406e458ea1","Type":"ContainerStarted","Data":"fa76d2f6266b5898e49b028af878b3eaa819a1d539b7e2d1e7c5332a2ee431f0"} Dec 08 21:41:21 crc kubenswrapper[4791]: I1208 21:41:21.922938 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=10.399688281 podStartE2EDuration="40.922920658s" podCreationTimestamp="2025-12-08 21:40:41 +0000 UTC" firstStartedPulling="2025-12-08 21:40:43.974657679 +0000 UTC m=+1320.673416024" lastFinishedPulling="2025-12-08 21:41:14.497890056 +0000 UTC m=+1351.196648401" observedRunningTime="2025-12-08 21:41:21.915025423 +0000 UTC m=+1358.613783768" watchObservedRunningTime="2025-12-08 21:41:21.922920658 +0000 UTC m=+1358.621679003" Dec 08 21:41:22 crc kubenswrapper[4791]: I1208 21:41:22.862873 4791 generic.go:334] "Generic (PLEG): container finished" podID="95730417-3c85-44a0-9a95-8fefeb495a03" containerID="2844490bb124b2b0cb9120a218e1c1c2be5253a09ceec9a1503462767511baaf" exitCode=0 Dec 08 21:41:22 crc kubenswrapper[4791]: I1208 21:41:22.862956 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-trs25" event={"ID":"95730417-3c85-44a0-9a95-8fefeb495a03","Type":"ContainerDied","Data":"2844490bb124b2b0cb9120a218e1c1c2be5253a09ceec9a1503462767511baaf"} Dec 08 21:41:22 crc kubenswrapper[4791]: I1208 21:41:22.866840 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-9b4hh" event={"ID":"d9b8addc-5f49-43c0-a4c3-23ed14252765","Type":"ContainerStarted","Data":"e347ff70148b7a72b94ff2e74efcddeb3f65cdaf494d7f411fe92f1ef817cc40"} Dec 08 21:41:22 crc kubenswrapper[4791]: I1208 21:41:22.867006 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-9b4hh" Dec 08 21:41:22 crc kubenswrapper[4791]: I1208 21:41:22.904589 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-9b4hh" podStartSLOduration=30.816302353 podStartE2EDuration="33.904571062s" podCreationTimestamp="2025-12-08 21:40:49 +0000 UTC" firstStartedPulling="2025-12-08 21:41:16.22791755 +0000 UTC m=+1352.926675895" lastFinishedPulling="2025-12-08 21:41:19.316186259 +0000 UTC m=+1356.014944604" observedRunningTime="2025-12-08 21:41:22.903361312 +0000 UTC m=+1359.602119657" watchObservedRunningTime="2025-12-08 21:41:22.904571062 +0000 UTC m=+1359.603329407" Dec 08 21:41:23 crc kubenswrapper[4791]: I1208 21:41:23.009276 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Dec 08 
21:41:23 crc kubenswrapper[4791]: I1208 21:41:23.009333 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Dec 08 21:41:23 crc kubenswrapper[4791]: I1208 21:41:23.877417 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-9b4hh" Dec 08 21:41:24 crc kubenswrapper[4791]: I1208 21:41:24.869979 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Dec 08 21:41:24 crc kubenswrapper[4791]: I1208 21:41:24.870027 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Dec 08 21:41:26 crc kubenswrapper[4791]: I1208 21:41:26.698819 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-56ffw"] Dec 08 21:41:26 crc kubenswrapper[4791]: I1208 21:41:26.763824 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-l5vmk"] Dec 08 21:41:26 crc kubenswrapper[4791]: I1208 21:41:26.766553 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7cb5889db5-l5vmk" Dec 08 21:41:26 crc kubenswrapper[4791]: I1208 21:41:26.795274 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-l5vmk"] Dec 08 21:41:26 crc kubenswrapper[4791]: I1208 21:41:26.905959 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f1bb18f6-32f3-4146-aa7b-b423b5ac262a-config\") pod \"dnsmasq-dns-7cb5889db5-l5vmk\" (UID: \"f1bb18f6-32f3-4146-aa7b-b423b5ac262a\") " pod="openstack/dnsmasq-dns-7cb5889db5-l5vmk" Dec 08 21:41:26 crc kubenswrapper[4791]: I1208 21:41:26.906098 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gzp8c\" (UniqueName: \"kubernetes.io/projected/f1bb18f6-32f3-4146-aa7b-b423b5ac262a-kube-api-access-gzp8c\") pod \"dnsmasq-dns-7cb5889db5-l5vmk\" (UID: \"f1bb18f6-32f3-4146-aa7b-b423b5ac262a\") " pod="openstack/dnsmasq-dns-7cb5889db5-l5vmk" Dec 08 21:41:26 crc kubenswrapper[4791]: I1208 21:41:26.906175 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f1bb18f6-32f3-4146-aa7b-b423b5ac262a-dns-svc\") pod \"dnsmasq-dns-7cb5889db5-l5vmk\" (UID: \"f1bb18f6-32f3-4146-aa7b-b423b5ac262a\") " pod="openstack/dnsmasq-dns-7cb5889db5-l5vmk" Dec 08 21:41:27 crc kubenswrapper[4791]: I1208 21:41:27.009253 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f1bb18f6-32f3-4146-aa7b-b423b5ac262a-config\") pod \"dnsmasq-dns-7cb5889db5-l5vmk\" (UID: \"f1bb18f6-32f3-4146-aa7b-b423b5ac262a\") " pod="openstack/dnsmasq-dns-7cb5889db5-l5vmk" Dec 08 21:41:27 crc kubenswrapper[4791]: I1208 21:41:27.009340 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gzp8c\" (UniqueName: \"kubernetes.io/projected/f1bb18f6-32f3-4146-aa7b-b423b5ac262a-kube-api-access-gzp8c\") pod \"dnsmasq-dns-7cb5889db5-l5vmk\" (UID: \"f1bb18f6-32f3-4146-aa7b-b423b5ac262a\") " pod="openstack/dnsmasq-dns-7cb5889db5-l5vmk" Dec 08 21:41:27 crc kubenswrapper[4791]: I1208 21:41:27.009384 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/f1bb18f6-32f3-4146-aa7b-b423b5ac262a-dns-svc\") pod \"dnsmasq-dns-7cb5889db5-l5vmk\" (UID: \"f1bb18f6-32f3-4146-aa7b-b423b5ac262a\") " pod="openstack/dnsmasq-dns-7cb5889db5-l5vmk" Dec 08 21:41:27 crc kubenswrapper[4791]: I1208 21:41:27.010170 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f1bb18f6-32f3-4146-aa7b-b423b5ac262a-config\") pod \"dnsmasq-dns-7cb5889db5-l5vmk\" (UID: \"f1bb18f6-32f3-4146-aa7b-b423b5ac262a\") " pod="openstack/dnsmasq-dns-7cb5889db5-l5vmk" Dec 08 21:41:27 crc kubenswrapper[4791]: I1208 21:41:27.011399 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f1bb18f6-32f3-4146-aa7b-b423b5ac262a-dns-svc\") pod \"dnsmasq-dns-7cb5889db5-l5vmk\" (UID: \"f1bb18f6-32f3-4146-aa7b-b423b5ac262a\") " pod="openstack/dnsmasq-dns-7cb5889db5-l5vmk" Dec 08 21:41:27 crc kubenswrapper[4791]: I1208 21:41:27.037367 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gzp8c\" (UniqueName: \"kubernetes.io/projected/f1bb18f6-32f3-4146-aa7b-b423b5ac262a-kube-api-access-gzp8c\") pod \"dnsmasq-dns-7cb5889db5-l5vmk\" (UID: \"f1bb18f6-32f3-4146-aa7b-b423b5ac262a\") " pod="openstack/dnsmasq-dns-7cb5889db5-l5vmk" Dec 08 21:41:27 crc kubenswrapper[4791]: I1208 21:41:27.102646 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7cb5889db5-l5vmk" Dec 08 21:41:27 crc kubenswrapper[4791]: I1208 21:41:27.944284 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Dec 08 21:41:27 crc kubenswrapper[4791]: I1208 21:41:27.956984 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Dec 08 21:41:27 crc kubenswrapper[4791]: I1208 21:41:27.961607 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Dec 08 21:41:27 crc kubenswrapper[4791]: I1208 21:41:27.961845 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Dec 08 21:41:27 crc kubenswrapper[4791]: I1208 21:41:27.961957 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-6h46g" Dec 08 21:41:27 crc kubenswrapper[4791]: I1208 21:41:27.962172 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Dec 08 21:41:27 crc kubenswrapper[4791]: I1208 21:41:27.969113 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 08 21:41:28 crc kubenswrapper[4791]: I1208 21:41:28.110779 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-56ffw" Dec 08 21:41:28 crc kubenswrapper[4791]: I1208 21:41:28.145794 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/7f7de8af-60f4-4571-bc47-95cb97ce0121-cache\") pod \"swift-storage-0\" (UID: \"7f7de8af-60f4-4571-bc47-95cb97ce0121\") " pod="openstack/swift-storage-0" Dec 08 21:41:28 crc kubenswrapper[4791]: I1208 21:41:28.146009 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/7f7de8af-60f4-4571-bc47-95cb97ce0121-lock\") pod \"swift-storage-0\" (UID: \"7f7de8af-60f4-4571-bc47-95cb97ce0121\") " pod="openstack/swift-storage-0" Dec 08 21:41:28 crc kubenswrapper[4791]: I1208 21:41:28.146071 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-aad006da-c8d2-484f-8a35-5028aee24ef7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-aad006da-c8d2-484f-8a35-5028aee24ef7\") pod \"swift-storage-0\" (UID: \"7f7de8af-60f4-4571-bc47-95cb97ce0121\") " pod="openstack/swift-storage-0" Dec 08 21:41:28 crc kubenswrapper[4791]: I1208 21:41:28.146139 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-slgjc\" (UniqueName: \"kubernetes.io/projected/7f7de8af-60f4-4571-bc47-95cb97ce0121-kube-api-access-slgjc\") pod \"swift-storage-0\" (UID: \"7f7de8af-60f4-4571-bc47-95cb97ce0121\") " pod="openstack/swift-storage-0" Dec 08 21:41:28 crc kubenswrapper[4791]: I1208 21:41:28.146255 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/7f7de8af-60f4-4571-bc47-95cb97ce0121-etc-swift\") pod \"swift-storage-0\" (UID: \"7f7de8af-60f4-4571-bc47-95cb97ce0121\") " pod="openstack/swift-storage-0" Dec 08 21:41:28 crc kubenswrapper[4791]: I1208 21:41:28.247382 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/07d0660f-c281-4624-9f12-f524fe2a8092-dns-svc\") pod \"07d0660f-c281-4624-9f12-f524fe2a8092\" (UID: \"07d0660f-c281-4624-9f12-f524fe2a8092\") " Dec 08 21:41:28 crc kubenswrapper[4791]: I1208 21:41:28.247480 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kf82z\" (UniqueName: \"kubernetes.io/projected/07d0660f-c281-4624-9f12-f524fe2a8092-kube-api-access-kf82z\") pod \"07d0660f-c281-4624-9f12-f524fe2a8092\" (UID: \"07d0660f-c281-4624-9f12-f524fe2a8092\") " Dec 08 21:41:28 crc kubenswrapper[4791]: I1208 21:41:28.247548 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07d0660f-c281-4624-9f12-f524fe2a8092-config\") pod \"07d0660f-c281-4624-9f12-f524fe2a8092\" (UID: \"07d0660f-c281-4624-9f12-f524fe2a8092\") " Dec 08 21:41:28 crc kubenswrapper[4791]: I1208 21:41:28.247833 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/7f7de8af-60f4-4571-bc47-95cb97ce0121-etc-swift\") pod \"swift-storage-0\" (UID: \"7f7de8af-60f4-4571-bc47-95cb97ce0121\") " pod="openstack/swift-storage-0" Dec 08 21:41:28 crc kubenswrapper[4791]: I1208 21:41:28.247932 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: 
\"kubernetes.io/empty-dir/7f7de8af-60f4-4571-bc47-95cb97ce0121-cache\") pod \"swift-storage-0\" (UID: \"7f7de8af-60f4-4571-bc47-95cb97ce0121\") " pod="openstack/swift-storage-0" Dec 08 21:41:28 crc kubenswrapper[4791]: I1208 21:41:28.247991 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/7f7de8af-60f4-4571-bc47-95cb97ce0121-lock\") pod \"swift-storage-0\" (UID: \"7f7de8af-60f4-4571-bc47-95cb97ce0121\") " pod="openstack/swift-storage-0" Dec 08 21:41:28 crc kubenswrapper[4791]: I1208 21:41:28.248014 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-aad006da-c8d2-484f-8a35-5028aee24ef7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-aad006da-c8d2-484f-8a35-5028aee24ef7\") pod \"swift-storage-0\" (UID: \"7f7de8af-60f4-4571-bc47-95cb97ce0121\") " pod="openstack/swift-storage-0" Dec 08 21:41:28 crc kubenswrapper[4791]: I1208 21:41:28.248025 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/07d0660f-c281-4624-9f12-f524fe2a8092-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "07d0660f-c281-4624-9f12-f524fe2a8092" (UID: "07d0660f-c281-4624-9f12-f524fe2a8092"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:41:28 crc kubenswrapper[4791]: I1208 21:41:28.248038 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-slgjc\" (UniqueName: \"kubernetes.io/projected/7f7de8af-60f4-4571-bc47-95cb97ce0121-kube-api-access-slgjc\") pod \"swift-storage-0\" (UID: \"7f7de8af-60f4-4571-bc47-95cb97ce0121\") " pod="openstack/swift-storage-0" Dec 08 21:41:28 crc kubenswrapper[4791]: E1208 21:41:28.248073 4791 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 08 21:41:28 crc kubenswrapper[4791]: E1208 21:41:28.248093 4791 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 08 21:41:28 crc kubenswrapper[4791]: E1208 21:41:28.248152 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/7f7de8af-60f4-4571-bc47-95cb97ce0121-etc-swift podName:7f7de8af-60f4-4571-bc47-95cb97ce0121 nodeName:}" failed. No retries permitted until 2025-12-08 21:41:28.748129804 +0000 UTC m=+1365.446888219 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/7f7de8af-60f4-4571-bc47-95cb97ce0121-etc-swift") pod "swift-storage-0" (UID: "7f7de8af-60f4-4571-bc47-95cb97ce0121") : configmap "swift-ring-files" not found Dec 08 21:41:28 crc kubenswrapper[4791]: I1208 21:41:28.248319 4791 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/07d0660f-c281-4624-9f12-f524fe2a8092-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:28 crc kubenswrapper[4791]: I1208 21:41:28.248469 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/7f7de8af-60f4-4571-bc47-95cb97ce0121-cache\") pod \"swift-storage-0\" (UID: \"7f7de8af-60f4-4571-bc47-95cb97ce0121\") " pod="openstack/swift-storage-0" Dec 08 21:41:28 crc kubenswrapper[4791]: I1208 21:41:28.248515 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/07d0660f-c281-4624-9f12-f524fe2a8092-config" (OuterVolumeSpecName: "config") pod "07d0660f-c281-4624-9f12-f524fe2a8092" (UID: "07d0660f-c281-4624-9f12-f524fe2a8092"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:41:28 crc kubenswrapper[4791]: I1208 21:41:28.248854 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/7f7de8af-60f4-4571-bc47-95cb97ce0121-lock\") pod \"swift-storage-0\" (UID: \"7f7de8af-60f4-4571-bc47-95cb97ce0121\") " pod="openstack/swift-storage-0" Dec 08 21:41:28 crc kubenswrapper[4791]: I1208 21:41:28.253273 4791 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 08 21:41:28 crc kubenswrapper[4791]: I1208 21:41:28.253315 4791 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-aad006da-c8d2-484f-8a35-5028aee24ef7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-aad006da-c8d2-484f-8a35-5028aee24ef7\") pod \"swift-storage-0\" (UID: \"7f7de8af-60f4-4571-bc47-95cb97ce0121\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/400e152bd2c3b4e9a28c192861bbe7e9bfe591f3c21fa3b0f2e31a862fe4de43/globalmount\"" pod="openstack/swift-storage-0" Dec 08 21:41:28 crc kubenswrapper[4791]: I1208 21:41:28.253905 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/07d0660f-c281-4624-9f12-f524fe2a8092-kube-api-access-kf82z" (OuterVolumeSpecName: "kube-api-access-kf82z") pod "07d0660f-c281-4624-9f12-f524fe2a8092" (UID: "07d0660f-c281-4624-9f12-f524fe2a8092"). InnerVolumeSpecName "kube-api-access-kf82z". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:41:28 crc kubenswrapper[4791]: I1208 21:41:28.267307 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-slgjc\" (UniqueName: \"kubernetes.io/projected/7f7de8af-60f4-4571-bc47-95cb97ce0121-kube-api-access-slgjc\") pod \"swift-storage-0\" (UID: \"7f7de8af-60f4-4571-bc47-95cb97ce0121\") " pod="openstack/swift-storage-0" Dec 08 21:41:28 crc kubenswrapper[4791]: I1208 21:41:28.296200 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-aad006da-c8d2-484f-8a35-5028aee24ef7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-aad006da-c8d2-484f-8a35-5028aee24ef7\") pod \"swift-storage-0\" (UID: \"7f7de8af-60f4-4571-bc47-95cb97ce0121\") " pod="openstack/swift-storage-0" Dec 08 21:41:28 crc kubenswrapper[4791]: I1208 21:41:28.350788 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kf82z\" (UniqueName: \"kubernetes.io/projected/07d0660f-c281-4624-9f12-f524fe2a8092-kube-api-access-kf82z\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:28 crc kubenswrapper[4791]: I1208 21:41:28.350830 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07d0660f-c281-4624-9f12-f524fe2a8092-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:28 crc kubenswrapper[4791]: I1208 21:41:28.758738 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/7f7de8af-60f4-4571-bc47-95cb97ce0121-etc-swift\") pod \"swift-storage-0\" (UID: \"7f7de8af-60f4-4571-bc47-95cb97ce0121\") " pod="openstack/swift-storage-0" Dec 08 21:41:28 crc kubenswrapper[4791]: E1208 21:41:28.759544 4791 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 08 21:41:28 crc kubenswrapper[4791]: E1208 21:41:28.759566 4791 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 08 21:41:28 crc kubenswrapper[4791]: E1208 21:41:28.759609 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/7f7de8af-60f4-4571-bc47-95cb97ce0121-etc-swift podName:7f7de8af-60f4-4571-bc47-95cb97ce0121 nodeName:}" failed. No retries permitted until 2025-12-08 21:41:29.759592309 +0000 UTC m=+1366.458350654 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/7f7de8af-60f4-4571-bc47-95cb97ce0121-etc-swift") pod "swift-storage-0" (UID: "7f7de8af-60f4-4571-bc47-95cb97ce0121") : configmap "swift-ring-files" not found Dec 08 21:41:28 crc kubenswrapper[4791]: I1208 21:41:28.927424 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-56ffw" event={"ID":"07d0660f-c281-4624-9f12-f524fe2a8092","Type":"ContainerDied","Data":"6567160b1cce2081238a19664dd2e271a7131495cc20449589c5ea74ceafede5"} Dec 08 21:41:28 crc kubenswrapper[4791]: I1208 21:41:28.927528 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-56ffw" Dec 08 21:41:29 crc kubenswrapper[4791]: I1208 21:41:29.046585 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-56ffw"] Dec 08 21:41:29 crc kubenswrapper[4791]: I1208 21:41:29.055956 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-56ffw"] Dec 08 21:41:29 crc kubenswrapper[4791]: I1208 21:41:29.174434 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-l5vmk"] Dec 08 21:41:29 crc kubenswrapper[4791]: W1208 21:41:29.195940 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf1bb18f6_32f3_4146_aa7b_b423b5ac262a.slice/crio-f8b5b1480ba74601ed0c213032e47fbd3ca975bd09ac2c738cd8d691f60a0f34 WatchSource:0}: Error finding container f8b5b1480ba74601ed0c213032e47fbd3ca975bd09ac2c738cd8d691f60a0f34: Status 404 returned error can't find the container with id f8b5b1480ba74601ed0c213032e47fbd3ca975bd09ac2c738cd8d691f60a0f34 Dec 08 21:41:29 crc kubenswrapper[4791]: I1208 21:41:29.609586 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="07d0660f-c281-4624-9f12-f524fe2a8092" path="/var/lib/kubelet/pods/07d0660f-c281-4624-9f12-f524fe2a8092/volumes" Dec 08 21:41:29 crc kubenswrapper[4791]: I1208 21:41:29.781288 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/7f7de8af-60f4-4571-bc47-95cb97ce0121-etc-swift\") pod \"swift-storage-0\" (UID: \"7f7de8af-60f4-4571-bc47-95cb97ce0121\") " pod="openstack/swift-storage-0" Dec 08 21:41:29 crc kubenswrapper[4791]: E1208 21:41:29.781583 4791 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 08 21:41:29 crc kubenswrapper[4791]: E1208 21:41:29.781601 4791 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 08 21:41:29 crc kubenswrapper[4791]: E1208 21:41:29.781660 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/7f7de8af-60f4-4571-bc47-95cb97ce0121-etc-swift podName:7f7de8af-60f4-4571-bc47-95cb97ce0121 nodeName:}" failed. No retries permitted until 2025-12-08 21:41:31.781643267 +0000 UTC m=+1368.480401612 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/7f7de8af-60f4-4571-bc47-95cb97ce0121-etc-swift") pod "swift-storage-0" (UID: "7f7de8af-60f4-4571-bc47-95cb97ce0121") : configmap "swift-ring-files" not found Dec 08 21:41:29 crc kubenswrapper[4791]: I1208 21:41:29.937497 4791 generic.go:334] "Generic (PLEG): container finished" podID="da534a4d-ccf5-4297-9a1e-5f3b2ad136f5" containerID="e386c8bd53fa4c5e5c4472fbd88c2d17fde648a08e32e1c40d7f3655b13510ec" exitCode=0 Dec 08 21:41:29 crc kubenswrapper[4791]: I1208 21:41:29.937955 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-f7kxb" event={"ID":"da534a4d-ccf5-4297-9a1e-5f3b2ad136f5","Type":"ContainerDied","Data":"e386c8bd53fa4c5e5c4472fbd88c2d17fde648a08e32e1c40d7f3655b13510ec"} Dec 08 21:41:29 crc kubenswrapper[4791]: I1208 21:41:29.941442 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"cfef56eb-b1e1-48cc-9b1a-c92587748a8d","Type":"ContainerStarted","Data":"f040132c7ab54a90b016f6a2b04afb3403e7ad3b48282924dd88199e4b514120"} Dec 08 21:41:29 crc kubenswrapper[4791]: I1208 21:41:29.944442 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-trs25" event={"ID":"95730417-3c85-44a0-9a95-8fefeb495a03","Type":"ContainerStarted","Data":"e2b0a5b74d1d79c81f058ce34ad85e975ef4627c890e756ef41d48baaf6a1b30"} Dec 08 21:41:29 crc kubenswrapper[4791]: I1208 21:41:29.945485 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-l5vmk" event={"ID":"f1bb18f6-32f3-4146-aa7b-b423b5ac262a","Type":"ContainerStarted","Data":"f8b5b1480ba74601ed0c213032e47fbd3ca975bd09ac2c738cd8d691f60a0f34"} Dec 08 21:41:29 crc kubenswrapper[4791]: I1208 21:41:29.951069 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"e94275f5-3fd4-409e-9496-431d35e9b1a5","Type":"ContainerStarted","Data":"ed1bc5aeea99f7fa01882b1b040ae6b8c7d5c07268c88d88e9ff3d39e6b45ecf"} Dec 08 21:41:29 crc kubenswrapper[4791]: I1208 21:41:29.992919 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=26.816353602 podStartE2EDuration="38.99289751s" podCreationTimestamp="2025-12-08 21:40:51 +0000 UTC" firstStartedPulling="2025-12-08 21:41:16.615087669 +0000 UTC m=+1353.313846014" lastFinishedPulling="2025-12-08 21:41:28.791631577 +0000 UTC m=+1365.490389922" observedRunningTime="2025-12-08 21:41:29.982520405 +0000 UTC m=+1366.681278750" watchObservedRunningTime="2025-12-08 21:41:29.99289751 +0000 UTC m=+1366.691655845" Dec 08 21:41:30 crc kubenswrapper[4791]: I1208 21:41:30.003895 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=24.827449738 podStartE2EDuration="38.00387688s" podCreationTimestamp="2025-12-08 21:40:52 +0000 UTC" firstStartedPulling="2025-12-08 21:41:15.549380336 +0000 UTC m=+1352.248138681" lastFinishedPulling="2025-12-08 21:41:28.725807478 +0000 UTC m=+1365.424565823" observedRunningTime="2025-12-08 21:41:30.003100421 +0000 UTC m=+1366.701858766" watchObservedRunningTime="2025-12-08 21:41:30.00387688 +0000 UTC m=+1366.702635235" Dec 08 21:41:30 crc kubenswrapper[4791]: I1208 21:41:30.027800 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-trs25" podStartSLOduration=7.96859562 podStartE2EDuration="20.027779618s" 
podCreationTimestamp="2025-12-08 21:41:10 +0000 UTC" firstStartedPulling="2025-12-08 21:41:16.732017808 +0000 UTC m=+1353.430776153" lastFinishedPulling="2025-12-08 21:41:28.791201806 +0000 UTC m=+1365.489960151" observedRunningTime="2025-12-08 21:41:30.025134513 +0000 UTC m=+1366.723892868" watchObservedRunningTime="2025-12-08 21:41:30.027779618 +0000 UTC m=+1366.726537963" Dec 08 21:41:30 crc kubenswrapper[4791]: E1208 21:41:30.361932 4791 log.go:32] "CreateContainer in sandbox from runtime service failed" err=< Dec 08 21:41:30 crc kubenswrapper[4791]: rpc error: code = Unknown desc = container create failed: mount `/var/lib/kubelet/pods/da534a4d-ccf5-4297-9a1e-5f3b2ad136f5/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Dec 08 21:41:30 crc kubenswrapper[4791]: > podSandboxID="063083cd026427a6ecbe95ffde8ce474e823ed62bd1e9b6ef3b9c10171f039ac" Dec 08 21:41:30 crc kubenswrapper[4791]: E1208 21:41:30.362159 4791 kuberuntime_manager.go:1274] "Unhandled Error" err=< Dec 08 21:41:30 crc kubenswrapper[4791]: container &Container{Name:dnsmasq-dns,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rlt27,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 
},Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-666b6646f7-f7kxb_openstack(da534a4d-ccf5-4297-9a1e-5f3b2ad136f5): CreateContainerError: container create failed: mount `/var/lib/kubelet/pods/da534a4d-ccf5-4297-9a1e-5f3b2ad136f5/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Dec 08 21:41:30 crc kubenswrapper[4791]: > logger="UnhandledError" Dec 08 21:41:30 crc kubenswrapper[4791]: E1208 21:41:30.363308 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"dnsmasq-dns\" with CreateContainerError: \"container create failed: mount `/var/lib/kubelet/pods/da534a4d-ccf5-4297-9a1e-5f3b2ad136f5/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory\\n\"" pod="openstack/dnsmasq-dns-666b6646f7-f7kxb" podUID="da534a4d-ccf5-4297-9a1e-5f3b2ad136f5" Dec 08 21:41:30 crc kubenswrapper[4791]: I1208 21:41:30.886600 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-trs25" Dec 08 21:41:30 crc kubenswrapper[4791]: I1208 21:41:30.888047 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-trs25" Dec 08 21:41:31 crc kubenswrapper[4791]: I1208 21:41:31.826396 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/7f7de8af-60f4-4571-bc47-95cb97ce0121-etc-swift\") pod \"swift-storage-0\" (UID: \"7f7de8af-60f4-4571-bc47-95cb97ce0121\") " pod="openstack/swift-storage-0" Dec 08 21:41:31 crc kubenswrapper[4791]: E1208 21:41:31.826611 4791 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 08 21:41:31 crc kubenswrapper[4791]: E1208 21:41:31.826855 4791 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 08 21:41:31 crc kubenswrapper[4791]: E1208 21:41:31.826915 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/7f7de8af-60f4-4571-bc47-95cb97ce0121-etc-swift podName:7f7de8af-60f4-4571-bc47-95cb97ce0121 nodeName:}" failed. No retries permitted until 2025-12-08 21:41:35.82689765 +0000 UTC m=+1372.525655995 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/7f7de8af-60f4-4571-bc47-95cb97ce0121-etc-swift") pod "swift-storage-0" (UID: "7f7de8af-60f4-4571-bc47-95cb97ce0121") : configmap "swift-ring-files" not found Dec 08 21:41:31 crc kubenswrapper[4791]: I1208 21:41:31.841365 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-249fw"] Dec 08 21:41:31 crc kubenswrapper[4791]: I1208 21:41:31.843062 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-249fw" Dec 08 21:41:31 crc kubenswrapper[4791]: I1208 21:41:31.852598 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-249fw"] Dec 08 21:41:31 crc kubenswrapper[4791]: I1208 21:41:31.853114 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 08 21:41:31 crc kubenswrapper[4791]: I1208 21:41:31.853353 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Dec 08 21:41:31 crc kubenswrapper[4791]: I1208 21:41:31.853188 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Dec 08 21:41:31 crc kubenswrapper[4791]: I1208 21:41:31.856004 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Dec 08 21:41:31 crc kubenswrapper[4791]: I1208 21:41:31.934002 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/5a5de61a-f218-4e36-afaf-2cab04468093-swiftconf\") pod \"swift-ring-rebalance-249fw\" (UID: \"5a5de61a-f218-4e36-afaf-2cab04468093\") " pod="openstack/swift-ring-rebalance-249fw" Dec 08 21:41:31 crc kubenswrapper[4791]: I1208 21:41:31.934381 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/5a5de61a-f218-4e36-afaf-2cab04468093-etc-swift\") pod \"swift-ring-rebalance-249fw\" (UID: \"5a5de61a-f218-4e36-afaf-2cab04468093\") " pod="openstack/swift-ring-rebalance-249fw" Dec 08 21:41:31 crc kubenswrapper[4791]: I1208 21:41:31.934464 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/5a5de61a-f218-4e36-afaf-2cab04468093-ring-data-devices\") pod \"swift-ring-rebalance-249fw\" (UID: \"5a5de61a-f218-4e36-afaf-2cab04468093\") " pod="openstack/swift-ring-rebalance-249fw" Dec 08 21:41:31 crc kubenswrapper[4791]: I1208 21:41:31.934604 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a5de61a-f218-4e36-afaf-2cab04468093-combined-ca-bundle\") pod \"swift-ring-rebalance-249fw\" (UID: \"5a5de61a-f218-4e36-afaf-2cab04468093\") " pod="openstack/swift-ring-rebalance-249fw" Dec 08 21:41:31 crc kubenswrapper[4791]: I1208 21:41:31.934774 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6bg7m\" (UniqueName: \"kubernetes.io/projected/5a5de61a-f218-4e36-afaf-2cab04468093-kube-api-access-6bg7m\") pod \"swift-ring-rebalance-249fw\" (UID: \"5a5de61a-f218-4e36-afaf-2cab04468093\") " pod="openstack/swift-ring-rebalance-249fw" Dec 08 21:41:31 crc kubenswrapper[4791]: I1208 21:41:31.934945 4791 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5a5de61a-f218-4e36-afaf-2cab04468093-scripts\") pod \"swift-ring-rebalance-249fw\" (UID: \"5a5de61a-f218-4e36-afaf-2cab04468093\") " pod="openstack/swift-ring-rebalance-249fw" Dec 08 21:41:31 crc kubenswrapper[4791]: I1208 21:41:31.935070 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/5a5de61a-f218-4e36-afaf-2cab04468093-dispersionconf\") pod \"swift-ring-rebalance-249fw\" (UID: \"5a5de61a-f218-4e36-afaf-2cab04468093\") " pod="openstack/swift-ring-rebalance-249fw" Dec 08 21:41:31 crc kubenswrapper[4791]: I1208 21:41:31.953073 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-trs25" podUID="95730417-3c85-44a0-9a95-8fefeb495a03" containerName="registry-server" probeResult="failure" output=< Dec 08 21:41:31 crc kubenswrapper[4791]: timeout: failed to connect service ":50051" within 1s Dec 08 21:41:31 crc kubenswrapper[4791]: > Dec 08 21:41:31 crc kubenswrapper[4791]: I1208 21:41:31.985998 4791 generic.go:334] "Generic (PLEG): container finished" podID="f1bb18f6-32f3-4146-aa7b-b423b5ac262a" containerID="421e30883ecfcbf4c08c7b4af42d73085551b6ba36ef021bf495324fb46817ca" exitCode=0 Dec 08 21:41:31 crc kubenswrapper[4791]: I1208 21:41:31.987436 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-l5vmk" event={"ID":"f1bb18f6-32f3-4146-aa7b-b423b5ac262a","Type":"ContainerDied","Data":"421e30883ecfcbf4c08c7b4af42d73085551b6ba36ef021bf495324fb46817ca"} Dec 08 21:41:32 crc kubenswrapper[4791]: I1208 21:41:32.038218 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6bg7m\" (UniqueName: \"kubernetes.io/projected/5a5de61a-f218-4e36-afaf-2cab04468093-kube-api-access-6bg7m\") pod \"swift-ring-rebalance-249fw\" (UID: \"5a5de61a-f218-4e36-afaf-2cab04468093\") " pod="openstack/swift-ring-rebalance-249fw" Dec 08 21:41:32 crc kubenswrapper[4791]: I1208 21:41:32.038295 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5a5de61a-f218-4e36-afaf-2cab04468093-scripts\") pod \"swift-ring-rebalance-249fw\" (UID: \"5a5de61a-f218-4e36-afaf-2cab04468093\") " pod="openstack/swift-ring-rebalance-249fw" Dec 08 21:41:32 crc kubenswrapper[4791]: I1208 21:41:32.038374 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/5a5de61a-f218-4e36-afaf-2cab04468093-dispersionconf\") pod \"swift-ring-rebalance-249fw\" (UID: \"5a5de61a-f218-4e36-afaf-2cab04468093\") " pod="openstack/swift-ring-rebalance-249fw" Dec 08 21:41:32 crc kubenswrapper[4791]: I1208 21:41:32.038440 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/5a5de61a-f218-4e36-afaf-2cab04468093-swiftconf\") pod \"swift-ring-rebalance-249fw\" (UID: \"5a5de61a-f218-4e36-afaf-2cab04468093\") " pod="openstack/swift-ring-rebalance-249fw" Dec 08 21:41:32 crc kubenswrapper[4791]: I1208 21:41:32.038468 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/5a5de61a-f218-4e36-afaf-2cab04468093-etc-swift\") pod \"swift-ring-rebalance-249fw\" (UID: \"5a5de61a-f218-4e36-afaf-2cab04468093\") " 
pod="openstack/swift-ring-rebalance-249fw" Dec 08 21:41:32 crc kubenswrapper[4791]: I1208 21:41:32.038486 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/5a5de61a-f218-4e36-afaf-2cab04468093-ring-data-devices\") pod \"swift-ring-rebalance-249fw\" (UID: \"5a5de61a-f218-4e36-afaf-2cab04468093\") " pod="openstack/swift-ring-rebalance-249fw" Dec 08 21:41:32 crc kubenswrapper[4791]: I1208 21:41:32.038586 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a5de61a-f218-4e36-afaf-2cab04468093-combined-ca-bundle\") pod \"swift-ring-rebalance-249fw\" (UID: \"5a5de61a-f218-4e36-afaf-2cab04468093\") " pod="openstack/swift-ring-rebalance-249fw" Dec 08 21:41:32 crc kubenswrapper[4791]: I1208 21:41:32.053377 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5a5de61a-f218-4e36-afaf-2cab04468093-scripts\") pod \"swift-ring-rebalance-249fw\" (UID: \"5a5de61a-f218-4e36-afaf-2cab04468093\") " pod="openstack/swift-ring-rebalance-249fw" Dec 08 21:41:32 crc kubenswrapper[4791]: I1208 21:41:32.055009 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a5de61a-f218-4e36-afaf-2cab04468093-combined-ca-bundle\") pod \"swift-ring-rebalance-249fw\" (UID: \"5a5de61a-f218-4e36-afaf-2cab04468093\") " pod="openstack/swift-ring-rebalance-249fw" Dec 08 21:41:32 crc kubenswrapper[4791]: I1208 21:41:32.056047 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/5a5de61a-f218-4e36-afaf-2cab04468093-etc-swift\") pod \"swift-ring-rebalance-249fw\" (UID: \"5a5de61a-f218-4e36-afaf-2cab04468093\") " pod="openstack/swift-ring-rebalance-249fw" Dec 08 21:41:32 crc kubenswrapper[4791]: I1208 21:41:32.065567 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/5a5de61a-f218-4e36-afaf-2cab04468093-ring-data-devices\") pod \"swift-ring-rebalance-249fw\" (UID: \"5a5de61a-f218-4e36-afaf-2cab04468093\") " pod="openstack/swift-ring-rebalance-249fw" Dec 08 21:41:32 crc kubenswrapper[4791]: I1208 21:41:32.075181 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6bg7m\" (UniqueName: \"kubernetes.io/projected/5a5de61a-f218-4e36-afaf-2cab04468093-kube-api-access-6bg7m\") pod \"swift-ring-rebalance-249fw\" (UID: \"5a5de61a-f218-4e36-afaf-2cab04468093\") " pod="openstack/swift-ring-rebalance-249fw" Dec 08 21:41:32 crc kubenswrapper[4791]: I1208 21:41:32.075522 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Dec 08 21:41:32 crc kubenswrapper[4791]: I1208 21:41:32.075796 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/5a5de61a-f218-4e36-afaf-2cab04468093-dispersionconf\") pod \"swift-ring-rebalance-249fw\" (UID: \"5a5de61a-f218-4e36-afaf-2cab04468093\") " pod="openstack/swift-ring-rebalance-249fw" Dec 08 21:41:32 crc kubenswrapper[4791]: I1208 21:41:32.079854 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/5a5de61a-f218-4e36-afaf-2cab04468093-swiftconf\") pod \"swift-ring-rebalance-249fw\" (UID: 
\"5a5de61a-f218-4e36-afaf-2cab04468093\") " pod="openstack/swift-ring-rebalance-249fw" Dec 08 21:41:32 crc kubenswrapper[4791]: I1208 21:41:32.169389 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-249fw" Dec 08 21:41:32 crc kubenswrapper[4791]: W1208 21:41:32.616881 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5a5de61a_f218_4e36_afaf_2cab04468093.slice/crio-5271e2c781f7894fa596ccfdc6daf80e1eeabcd29ab54582cbd21032795fc9fb WatchSource:0}: Error finding container 5271e2c781f7894fa596ccfdc6daf80e1eeabcd29ab54582cbd21032795fc9fb: Status 404 returned error can't find the container with id 5271e2c781f7894fa596ccfdc6daf80e1eeabcd29ab54582cbd21032795fc9fb Dec 08 21:41:32 crc kubenswrapper[4791]: I1208 21:41:32.617481 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-249fw"] Dec 08 21:41:32 crc kubenswrapper[4791]: I1208 21:41:32.783187 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Dec 08 21:41:32 crc kubenswrapper[4791]: I1208 21:41:32.824634 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Dec 08 21:41:32 crc kubenswrapper[4791]: I1208 21:41:32.855794 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Dec 08 21:41:32 crc kubenswrapper[4791]: I1208 21:41:32.893231 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Dec 08 21:41:32 crc kubenswrapper[4791]: I1208 21:41:32.995291 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-249fw" event={"ID":"5a5de61a-f218-4e36-afaf-2cab04468093","Type":"ContainerStarted","Data":"5271e2c781f7894fa596ccfdc6daf80e1eeabcd29ab54582cbd21032795fc9fb"} Dec 08 21:41:32 crc kubenswrapper[4791]: I1208 21:41:32.995752 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.036865 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.210951 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-f7kxb"] Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.271596 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d65f699f-q4qnx"] Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.276398 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d65f699f-q4qnx" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.282902 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-85s7k"] Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.284835 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-85s7k" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.289974 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.292258 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.307280 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d65f699f-q4qnx"] Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.337189 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-85s7k"] Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.369357 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/eb1376f4-3d74-4175-8635-500b29e89984-ovs-rundir\") pod \"ovn-controller-metrics-85s7k\" (UID: \"eb1376f4-3d74-4175-8635-500b29e89984\") " pod="openstack/ovn-controller-metrics-85s7k" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.369462 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb1376f4-3d74-4175-8635-500b29e89984-config\") pod \"ovn-controller-metrics-85s7k\" (UID: \"eb1376f4-3d74-4175-8635-500b29e89984\") " pod="openstack/ovn-controller-metrics-85s7k" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.369490 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/523f1b51-d3bc-419c-ac74-63559640838c-dns-svc\") pod \"dnsmasq-dns-57d65f699f-q4qnx\" (UID: \"523f1b51-d3bc-419c-ac74-63559640838c\") " pod="openstack/dnsmasq-dns-57d65f699f-q4qnx" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.369525 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xhgz7\" (UniqueName: \"kubernetes.io/projected/523f1b51-d3bc-419c-ac74-63559640838c-kube-api-access-xhgz7\") pod \"dnsmasq-dns-57d65f699f-q4qnx\" (UID: \"523f1b51-d3bc-419c-ac74-63559640838c\") " pod="openstack/dnsmasq-dns-57d65f699f-q4qnx" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.369553 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rwcpf\" (UniqueName: \"kubernetes.io/projected/eb1376f4-3d74-4175-8635-500b29e89984-kube-api-access-rwcpf\") pod \"ovn-controller-metrics-85s7k\" (UID: \"eb1376f4-3d74-4175-8635-500b29e89984\") " pod="openstack/ovn-controller-metrics-85s7k" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.369641 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb1376f4-3d74-4175-8635-500b29e89984-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-85s7k\" (UID: \"eb1376f4-3d74-4175-8635-500b29e89984\") " pod="openstack/ovn-controller-metrics-85s7k" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.369678 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/523f1b51-d3bc-419c-ac74-63559640838c-config\") pod \"dnsmasq-dns-57d65f699f-q4qnx\" (UID: \"523f1b51-d3bc-419c-ac74-63559640838c\") " 
pod="openstack/dnsmasq-dns-57d65f699f-q4qnx" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.369731 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb1376f4-3d74-4175-8635-500b29e89984-combined-ca-bundle\") pod \"ovn-controller-metrics-85s7k\" (UID: \"eb1376f4-3d74-4175-8635-500b29e89984\") " pod="openstack/ovn-controller-metrics-85s7k" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.369752 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/523f1b51-d3bc-419c-ac74-63559640838c-ovsdbserver-nb\") pod \"dnsmasq-dns-57d65f699f-q4qnx\" (UID: \"523f1b51-d3bc-419c-ac74-63559640838c\") " pod="openstack/dnsmasq-dns-57d65f699f-q4qnx" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.369782 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/eb1376f4-3d74-4175-8635-500b29e89984-ovn-rundir\") pod \"ovn-controller-metrics-85s7k\" (UID: \"eb1376f4-3d74-4175-8635-500b29e89984\") " pod="openstack/ovn-controller-metrics-85s7k" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.422798 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-l5vmk"] Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.440120 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.442001 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.446824 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.447012 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.447123 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-jjscp" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.447933 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.449387 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.472117 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/523f1b51-d3bc-419c-ac74-63559640838c-config\") pod \"dnsmasq-dns-57d65f699f-q4qnx\" (UID: \"523f1b51-d3bc-419c-ac74-63559640838c\") " pod="openstack/dnsmasq-dns-57d65f699f-q4qnx" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.472195 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb1376f4-3d74-4175-8635-500b29e89984-combined-ca-bundle\") pod \"ovn-controller-metrics-85s7k\" (UID: \"eb1376f4-3d74-4175-8635-500b29e89984\") " pod="openstack/ovn-controller-metrics-85s7k" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.472237 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/523f1b51-d3bc-419c-ac74-63559640838c-ovsdbserver-nb\") pod \"dnsmasq-dns-57d65f699f-q4qnx\" (UID: \"523f1b51-d3bc-419c-ac74-63559640838c\") " pod="openstack/dnsmasq-dns-57d65f699f-q4qnx" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.472271 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/eb1376f4-3d74-4175-8635-500b29e89984-ovn-rundir\") pod \"ovn-controller-metrics-85s7k\" (UID: \"eb1376f4-3d74-4175-8635-500b29e89984\") " pod="openstack/ovn-controller-metrics-85s7k" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.472336 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/eb1376f4-3d74-4175-8635-500b29e89984-ovs-rundir\") pod \"ovn-controller-metrics-85s7k\" (UID: \"eb1376f4-3d74-4175-8635-500b29e89984\") " pod="openstack/ovn-controller-metrics-85s7k" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.472388 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb1376f4-3d74-4175-8635-500b29e89984-config\") pod \"ovn-controller-metrics-85s7k\" (UID: \"eb1376f4-3d74-4175-8635-500b29e89984\") " pod="openstack/ovn-controller-metrics-85s7k" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.472412 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/523f1b51-d3bc-419c-ac74-63559640838c-dns-svc\") pod \"dnsmasq-dns-57d65f699f-q4qnx\" (UID: \"523f1b51-d3bc-419c-ac74-63559640838c\") " pod="openstack/dnsmasq-dns-57d65f699f-q4qnx" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.472454 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xhgz7\" (UniqueName: \"kubernetes.io/projected/523f1b51-d3bc-419c-ac74-63559640838c-kube-api-access-xhgz7\") pod \"dnsmasq-dns-57d65f699f-q4qnx\" (UID: \"523f1b51-d3bc-419c-ac74-63559640838c\") " pod="openstack/dnsmasq-dns-57d65f699f-q4qnx" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.472480 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rwcpf\" (UniqueName: \"kubernetes.io/projected/eb1376f4-3d74-4175-8635-500b29e89984-kube-api-access-rwcpf\") pod \"ovn-controller-metrics-85s7k\" (UID: \"eb1376f4-3d74-4175-8635-500b29e89984\") " pod="openstack/ovn-controller-metrics-85s7k" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.472575 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb1376f4-3d74-4175-8635-500b29e89984-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-85s7k\" (UID: \"eb1376f4-3d74-4175-8635-500b29e89984\") " pod="openstack/ovn-controller-metrics-85s7k" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.474379 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/eb1376f4-3d74-4175-8635-500b29e89984-ovs-rundir\") pod \"ovn-controller-metrics-85s7k\" (UID: \"eb1376f4-3d74-4175-8635-500b29e89984\") " pod="openstack/ovn-controller-metrics-85s7k" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.478411 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-7cwhm"] Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.478588 4791 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/523f1b51-d3bc-419c-ac74-63559640838c-ovsdbserver-nb\") pod \"dnsmasq-dns-57d65f699f-q4qnx\" (UID: \"523f1b51-d3bc-419c-ac74-63559640838c\") " pod="openstack/dnsmasq-dns-57d65f699f-q4qnx" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.478588 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb1376f4-3d74-4175-8635-500b29e89984-config\") pod \"ovn-controller-metrics-85s7k\" (UID: \"eb1376f4-3d74-4175-8635-500b29e89984\") " pod="openstack/ovn-controller-metrics-85s7k" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.478858 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/eb1376f4-3d74-4175-8635-500b29e89984-ovn-rundir\") pod \"ovn-controller-metrics-85s7k\" (UID: \"eb1376f4-3d74-4175-8635-500b29e89984\") " pod="openstack/ovn-controller-metrics-85s7k" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.479248 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/523f1b51-d3bc-419c-ac74-63559640838c-dns-svc\") pod \"dnsmasq-dns-57d65f699f-q4qnx\" (UID: \"523f1b51-d3bc-419c-ac74-63559640838c\") " pod="openstack/dnsmasq-dns-57d65f699f-q4qnx" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.479824 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/523f1b51-d3bc-419c-ac74-63559640838c-config\") pod \"dnsmasq-dns-57d65f699f-q4qnx\" (UID: \"523f1b51-d3bc-419c-ac74-63559640838c\") " pod="openstack/dnsmasq-dns-57d65f699f-q4qnx" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.480760 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-7cwhm" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.486989 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.491584 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb1376f4-3d74-4175-8635-500b29e89984-combined-ca-bundle\") pod \"ovn-controller-metrics-85s7k\" (UID: \"eb1376f4-3d74-4175-8635-500b29e89984\") " pod="openstack/ovn-controller-metrics-85s7k" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.506421 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb1376f4-3d74-4175-8635-500b29e89984-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-85s7k\" (UID: \"eb1376f4-3d74-4175-8635-500b29e89984\") " pod="openstack/ovn-controller-metrics-85s7k" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.507047 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xhgz7\" (UniqueName: \"kubernetes.io/projected/523f1b51-d3bc-419c-ac74-63559640838c-kube-api-access-xhgz7\") pod \"dnsmasq-dns-57d65f699f-q4qnx\" (UID: \"523f1b51-d3bc-419c-ac74-63559640838c\") " pod="openstack/dnsmasq-dns-57d65f699f-q4qnx" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.509428 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rwcpf\" (UniqueName: \"kubernetes.io/projected/eb1376f4-3d74-4175-8635-500b29e89984-kube-api-access-rwcpf\") pod \"ovn-controller-metrics-85s7k\" (UID: \"eb1376f4-3d74-4175-8635-500b29e89984\") " pod="openstack/ovn-controller-metrics-85s7k" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.525133 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-7cwhm"] Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.577020 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4ad73e16-b73d-48d2-9968-934d17c0dea1-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-7cwhm\" (UID: \"4ad73e16-b73d-48d2-9968-934d17c0dea1\") " pod="openstack/dnsmasq-dns-b8fbc5445-7cwhm" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.577120 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e2977e67-f037-4524-bfb5-0b04940113f7-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"e2977e67-f037-4524-bfb5-0b04940113f7\") " pod="openstack/ovn-northd-0" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.577148 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wm9sx\" (UniqueName: \"kubernetes.io/projected/4ad73e16-b73d-48d2-9968-934d17c0dea1-kube-api-access-wm9sx\") pod \"dnsmasq-dns-b8fbc5445-7cwhm\" (UID: \"4ad73e16-b73d-48d2-9968-934d17c0dea1\") " pod="openstack/dnsmasq-dns-b8fbc5445-7cwhm" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.577234 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2977e67-f037-4524-bfb5-0b04940113f7-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"e2977e67-f037-4524-bfb5-0b04940113f7\") " pod="openstack/ovn-northd-0" Dec 08 21:41:33 crc 
kubenswrapper[4791]: I1208 21:41:33.577294 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e2977e67-f037-4524-bfb5-0b04940113f7-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"e2977e67-f037-4524-bfb5-0b04940113f7\") " pod="openstack/ovn-northd-0" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.577323 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4ad73e16-b73d-48d2-9968-934d17c0dea1-config\") pod \"dnsmasq-dns-b8fbc5445-7cwhm\" (UID: \"4ad73e16-b73d-48d2-9968-934d17c0dea1\") " pod="openstack/dnsmasq-dns-b8fbc5445-7cwhm" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.577385 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hsrf5\" (UniqueName: \"kubernetes.io/projected/e2977e67-f037-4524-bfb5-0b04940113f7-kube-api-access-hsrf5\") pod \"ovn-northd-0\" (UID: \"e2977e67-f037-4524-bfb5-0b04940113f7\") " pod="openstack/ovn-northd-0" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.577418 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e2977e67-f037-4524-bfb5-0b04940113f7-scripts\") pod \"ovn-northd-0\" (UID: \"e2977e67-f037-4524-bfb5-0b04940113f7\") " pod="openstack/ovn-northd-0" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.577760 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4ad73e16-b73d-48d2-9968-934d17c0dea1-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-7cwhm\" (UID: \"4ad73e16-b73d-48d2-9968-934d17c0dea1\") " pod="openstack/dnsmasq-dns-b8fbc5445-7cwhm" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.577814 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e2977e67-f037-4524-bfb5-0b04940113f7-config\") pod \"ovn-northd-0\" (UID: \"e2977e67-f037-4524-bfb5-0b04940113f7\") " pod="openstack/ovn-northd-0" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.577865 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4ad73e16-b73d-48d2-9968-934d17c0dea1-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-7cwhm\" (UID: \"4ad73e16-b73d-48d2-9968-934d17c0dea1\") " pod="openstack/dnsmasq-dns-b8fbc5445-7cwhm" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.577979 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/e2977e67-f037-4524-bfb5-0b04940113f7-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"e2977e67-f037-4524-bfb5-0b04940113f7\") " pod="openstack/ovn-northd-0" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.614692 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d65f699f-q4qnx" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.636059 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-85s7k" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.679914 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2977e67-f037-4524-bfb5-0b04940113f7-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"e2977e67-f037-4524-bfb5-0b04940113f7\") " pod="openstack/ovn-northd-0" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.679985 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e2977e67-f037-4524-bfb5-0b04940113f7-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"e2977e67-f037-4524-bfb5-0b04940113f7\") " pod="openstack/ovn-northd-0" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.680005 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4ad73e16-b73d-48d2-9968-934d17c0dea1-config\") pod \"dnsmasq-dns-b8fbc5445-7cwhm\" (UID: \"4ad73e16-b73d-48d2-9968-934d17c0dea1\") " pod="openstack/dnsmasq-dns-b8fbc5445-7cwhm" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.680043 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hsrf5\" (UniqueName: \"kubernetes.io/projected/e2977e67-f037-4524-bfb5-0b04940113f7-kube-api-access-hsrf5\") pod \"ovn-northd-0\" (UID: \"e2977e67-f037-4524-bfb5-0b04940113f7\") " pod="openstack/ovn-northd-0" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.680064 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e2977e67-f037-4524-bfb5-0b04940113f7-scripts\") pod \"ovn-northd-0\" (UID: \"e2977e67-f037-4524-bfb5-0b04940113f7\") " pod="openstack/ovn-northd-0" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.680213 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4ad73e16-b73d-48d2-9968-934d17c0dea1-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-7cwhm\" (UID: \"4ad73e16-b73d-48d2-9968-934d17c0dea1\") " pod="openstack/dnsmasq-dns-b8fbc5445-7cwhm" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.680241 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e2977e67-f037-4524-bfb5-0b04940113f7-config\") pod \"ovn-northd-0\" (UID: \"e2977e67-f037-4524-bfb5-0b04940113f7\") " pod="openstack/ovn-northd-0" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.680261 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4ad73e16-b73d-48d2-9968-934d17c0dea1-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-7cwhm\" (UID: \"4ad73e16-b73d-48d2-9968-934d17c0dea1\") " pod="openstack/dnsmasq-dns-b8fbc5445-7cwhm" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.680316 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/e2977e67-f037-4524-bfb5-0b04940113f7-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"e2977e67-f037-4524-bfb5-0b04940113f7\") " pod="openstack/ovn-northd-0" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.680345 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/4ad73e16-b73d-48d2-9968-934d17c0dea1-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-7cwhm\" (UID: \"4ad73e16-b73d-48d2-9968-934d17c0dea1\") " pod="openstack/dnsmasq-dns-b8fbc5445-7cwhm" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.680369 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e2977e67-f037-4524-bfb5-0b04940113f7-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"e2977e67-f037-4524-bfb5-0b04940113f7\") " pod="openstack/ovn-northd-0" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.680383 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wm9sx\" (UniqueName: \"kubernetes.io/projected/4ad73e16-b73d-48d2-9968-934d17c0dea1-kube-api-access-wm9sx\") pod \"dnsmasq-dns-b8fbc5445-7cwhm\" (UID: \"4ad73e16-b73d-48d2-9968-934d17c0dea1\") " pod="openstack/dnsmasq-dns-b8fbc5445-7cwhm" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.693866 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4ad73e16-b73d-48d2-9968-934d17c0dea1-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-7cwhm\" (UID: \"4ad73e16-b73d-48d2-9968-934d17c0dea1\") " pod="openstack/dnsmasq-dns-b8fbc5445-7cwhm" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.695589 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4ad73e16-b73d-48d2-9968-934d17c0dea1-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-7cwhm\" (UID: \"4ad73e16-b73d-48d2-9968-934d17c0dea1\") " pod="openstack/dnsmasq-dns-b8fbc5445-7cwhm" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.696361 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e2977e67-f037-4524-bfb5-0b04940113f7-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"e2977e67-f037-4524-bfb5-0b04940113f7\") " pod="openstack/ovn-northd-0" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.698329 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e2977e67-f037-4524-bfb5-0b04940113f7-scripts\") pod \"ovn-northd-0\" (UID: \"e2977e67-f037-4524-bfb5-0b04940113f7\") " pod="openstack/ovn-northd-0" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.699509 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e2977e67-f037-4524-bfb5-0b04940113f7-config\") pod \"ovn-northd-0\" (UID: \"e2977e67-f037-4524-bfb5-0b04940113f7\") " pod="openstack/ovn-northd-0" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.700332 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4ad73e16-b73d-48d2-9968-934d17c0dea1-config\") pod \"dnsmasq-dns-b8fbc5445-7cwhm\" (UID: \"4ad73e16-b73d-48d2-9968-934d17c0dea1\") " pod="openstack/dnsmasq-dns-b8fbc5445-7cwhm" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.704113 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4ad73e16-b73d-48d2-9968-934d17c0dea1-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-7cwhm\" (UID: \"4ad73e16-b73d-48d2-9968-934d17c0dea1\") " pod="openstack/dnsmasq-dns-b8fbc5445-7cwhm" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.720213 4791 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2977e67-f037-4524-bfb5-0b04940113f7-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"e2977e67-f037-4524-bfb5-0b04940113f7\") " pod="openstack/ovn-northd-0" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.720558 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wm9sx\" (UniqueName: \"kubernetes.io/projected/4ad73e16-b73d-48d2-9968-934d17c0dea1-kube-api-access-wm9sx\") pod \"dnsmasq-dns-b8fbc5445-7cwhm\" (UID: \"4ad73e16-b73d-48d2-9968-934d17c0dea1\") " pod="openstack/dnsmasq-dns-b8fbc5445-7cwhm" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.721359 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e2977e67-f037-4524-bfb5-0b04940113f7-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"e2977e67-f037-4524-bfb5-0b04940113f7\") " pod="openstack/ovn-northd-0" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.725265 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/e2977e67-f037-4524-bfb5-0b04940113f7-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"e2977e67-f037-4524-bfb5-0b04940113f7\") " pod="openstack/ovn-northd-0" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.730880 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hsrf5\" (UniqueName: \"kubernetes.io/projected/e2977e67-f037-4524-bfb5-0b04940113f7-kube-api-access-hsrf5\") pod \"ovn-northd-0\" (UID: \"e2977e67-f037-4524-bfb5-0b04940113f7\") " pod="openstack/ovn-northd-0" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.737229 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.772506 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.793157 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-f7kxb" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.884179 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/da534a4d-ccf5-4297-9a1e-5f3b2ad136f5-dns-svc\") pod \"da534a4d-ccf5-4297-9a1e-5f3b2ad136f5\" (UID: \"da534a4d-ccf5-4297-9a1e-5f3b2ad136f5\") " Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.884636 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/da534a4d-ccf5-4297-9a1e-5f3b2ad136f5-config\") pod \"da534a4d-ccf5-4297-9a1e-5f3b2ad136f5\" (UID: \"da534a4d-ccf5-4297-9a1e-5f3b2ad136f5\") " Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.885137 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rlt27\" (UniqueName: \"kubernetes.io/projected/da534a4d-ccf5-4297-9a1e-5f3b2ad136f5-kube-api-access-rlt27\") pod \"da534a4d-ccf5-4297-9a1e-5f3b2ad136f5\" (UID: \"da534a4d-ccf5-4297-9a1e-5f3b2ad136f5\") " Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.889988 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da534a4d-ccf5-4297-9a1e-5f3b2ad136f5-kube-api-access-rlt27" (OuterVolumeSpecName: "kube-api-access-rlt27") pod "da534a4d-ccf5-4297-9a1e-5f3b2ad136f5" (UID: "da534a4d-ccf5-4297-9a1e-5f3b2ad136f5"). InnerVolumeSpecName "kube-api-access-rlt27". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.893526 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-7cwhm" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.910642 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.988682 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rlt27\" (UniqueName: \"kubernetes.io/projected/da534a4d-ccf5-4297-9a1e-5f3b2ad136f5-kube-api-access-rlt27\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:33 crc kubenswrapper[4791]: I1208 21:41:33.999544 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/da534a4d-ccf5-4297-9a1e-5f3b2ad136f5-config" (OuterVolumeSpecName: "config") pod "da534a4d-ccf5-4297-9a1e-5f3b2ad136f5" (UID: "da534a4d-ccf5-4297-9a1e-5f3b2ad136f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:41:34 crc kubenswrapper[4791]: I1208 21:41:34.044957 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-l5vmk" event={"ID":"f1bb18f6-32f3-4146-aa7b-b423b5ac262a","Type":"ContainerStarted","Data":"4d0af36019015bee5fa6ff7e8ceba5150c5ec0dc86c0fd5ab4d2d928d18ce5e1"} Dec 08 21:41:34 crc kubenswrapper[4791]: I1208 21:41:34.046796 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7cb5889db5-l5vmk" Dec 08 21:41:34 crc kubenswrapper[4791]: I1208 21:41:34.064217 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-f7kxb" Dec 08 21:41:34 crc kubenswrapper[4791]: I1208 21:41:34.064379 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-f7kxb" event={"ID":"da534a4d-ccf5-4297-9a1e-5f3b2ad136f5","Type":"ContainerDied","Data":"063083cd026427a6ecbe95ffde8ce474e823ed62bd1e9b6ef3b9c10171f039ac"} Dec 08 21:41:34 crc kubenswrapper[4791]: I1208 21:41:34.064408 4791 scope.go:117] "RemoveContainer" containerID="e386c8bd53fa4c5e5c4472fbd88c2d17fde648a08e32e1c40d7f3655b13510ec" Dec 08 21:41:34 crc kubenswrapper[4791]: I1208 21:41:34.091281 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7cb5889db5-l5vmk" podStartSLOduration=8.09125779 podStartE2EDuration="8.09125779s" podCreationTimestamp="2025-12-08 21:41:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:41:34.073739149 +0000 UTC m=+1370.772497514" watchObservedRunningTime="2025-12-08 21:41:34.09125779 +0000 UTC m=+1370.790016125" Dec 08 21:41:34 crc kubenswrapper[4791]: I1208 21:41:34.091794 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/da534a4d-ccf5-4297-9a1e-5f3b2ad136f5-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:34 crc kubenswrapper[4791]: I1208 21:41:34.113657 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/da534a4d-ccf5-4297-9a1e-5f3b2ad136f5-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "da534a4d-ccf5-4297-9a1e-5f3b2ad136f5" (UID: "da534a4d-ccf5-4297-9a1e-5f3b2ad136f5"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:41:34 crc kubenswrapper[4791]: I1208 21:41:34.194975 4791 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/da534a4d-ccf5-4297-9a1e-5f3b2ad136f5-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:34 crc kubenswrapper[4791]: I1208 21:41:34.229513 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-85s7k"] Dec 08 21:41:34 crc kubenswrapper[4791]: I1208 21:41:34.462662 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-f7kxb"] Dec 08 21:41:34 crc kubenswrapper[4791]: I1208 21:41:34.475667 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d65f699f-q4qnx"] Dec 08 21:41:34 crc kubenswrapper[4791]: W1208 21:41:34.485641 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod523f1b51_d3bc_419c_ac74_63559640838c.slice/crio-1550fb1e8833664f58980968f42b7ca46d38cc617bfa04b476535e30334b675c WatchSource:0}: Error finding container 1550fb1e8833664f58980968f42b7ca46d38cc617bfa04b476535e30334b675c: Status 404 returned error can't find the container with id 1550fb1e8833664f58980968f42b7ca46d38cc617bfa04b476535e30334b675c Dec 08 21:41:34 crc kubenswrapper[4791]: I1208 21:41:34.491530 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-f7kxb"] Dec 08 21:41:34 crc kubenswrapper[4791]: I1208 21:41:34.556473 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 08 21:41:34 crc kubenswrapper[4791]: I1208 21:41:34.649641 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/dnsmasq-dns-b8fbc5445-7cwhm"] Dec 08 21:41:35 crc kubenswrapper[4791]: I1208 21:41:35.090680 4791 generic.go:334] "Generic (PLEG): container finished" podID="523f1b51-d3bc-419c-ac74-63559640838c" containerID="ea2cf20a61f76e3623cc014bc14f61623743171a132910a7f4f89075564a0ced" exitCode=0 Dec 08 21:41:35 crc kubenswrapper[4791]: I1208 21:41:35.090810 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d65f699f-q4qnx" event={"ID":"523f1b51-d3bc-419c-ac74-63559640838c","Type":"ContainerDied","Data":"ea2cf20a61f76e3623cc014bc14f61623743171a132910a7f4f89075564a0ced"} Dec 08 21:41:35 crc kubenswrapper[4791]: I1208 21:41:35.091054 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d65f699f-q4qnx" event={"ID":"523f1b51-d3bc-419c-ac74-63559640838c","Type":"ContainerStarted","Data":"1550fb1e8833664f58980968f42b7ca46d38cc617bfa04b476535e30334b675c"} Dec 08 21:41:35 crc kubenswrapper[4791]: I1208 21:41:35.094383 4791 generic.go:334] "Generic (PLEG): container finished" podID="4ad73e16-b73d-48d2-9968-934d17c0dea1" containerID="8bf23f9d0e1e05ac540011b43d16d434ad1c2856b16805372986ab78033b0d6f" exitCode=0 Dec 08 21:41:35 crc kubenswrapper[4791]: I1208 21:41:35.094453 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-7cwhm" event={"ID":"4ad73e16-b73d-48d2-9968-934d17c0dea1","Type":"ContainerDied","Data":"8bf23f9d0e1e05ac540011b43d16d434ad1c2856b16805372986ab78033b0d6f"} Dec 08 21:41:35 crc kubenswrapper[4791]: I1208 21:41:35.094511 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-7cwhm" event={"ID":"4ad73e16-b73d-48d2-9968-934d17c0dea1","Type":"ContainerStarted","Data":"779888df60463190360972dcb878af1c8adab92b8d091ca4ea59aad097b7ce32"} Dec 08 21:41:35 crc kubenswrapper[4791]: I1208 21:41:35.097305 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-85s7k" event={"ID":"eb1376f4-3d74-4175-8635-500b29e89984","Type":"ContainerStarted","Data":"aba508ab31994be9e83d5f5d39ae4195469a443861720cbfcaee136dc040052c"} Dec 08 21:41:35 crc kubenswrapper[4791]: I1208 21:41:35.097339 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-85s7k" event={"ID":"eb1376f4-3d74-4175-8635-500b29e89984","Type":"ContainerStarted","Data":"04c2b36919ae2f3d502ded353ff00ff874b68af80c2226ae009af9a7540225e7"} Dec 08 21:41:35 crc kubenswrapper[4791]: I1208 21:41:35.100539 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"e2977e67-f037-4524-bfb5-0b04940113f7","Type":"ContainerStarted","Data":"365c1b2798020b7b82fad016d6ed6e934706f1448d0ff6f93cb7c467d83c3bc0"} Dec 08 21:41:35 crc kubenswrapper[4791]: I1208 21:41:35.100747 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7cb5889db5-l5vmk" podUID="f1bb18f6-32f3-4146-aa7b-b423b5ac262a" containerName="dnsmasq-dns" containerID="cri-o://4d0af36019015bee5fa6ff7e8ceba5150c5ec0dc86c0fd5ab4d2d928d18ce5e1" gracePeriod=10 Dec 08 21:41:35 crc kubenswrapper[4791]: I1208 21:41:35.131877 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-85s7k" podStartSLOduration=2.131859873 podStartE2EDuration="2.131859873s" podCreationTimestamp="2025-12-08 21:41:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:41:35.128338316 
+0000 UTC m=+1371.827096701" watchObservedRunningTime="2025-12-08 21:41:35.131859873 +0000 UTC m=+1371.830618218" Dec 08 21:41:35 crc kubenswrapper[4791]: I1208 21:41:35.251294 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:41:35 crc kubenswrapper[4791]: I1208 21:41:35.251360 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:41:35 crc kubenswrapper[4791]: I1208 21:41:35.612595 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="da534a4d-ccf5-4297-9a1e-5f3b2ad136f5" path="/var/lib/kubelet/pods/da534a4d-ccf5-4297-9a1e-5f3b2ad136f5/volumes" Dec 08 21:41:35 crc kubenswrapper[4791]: I1208 21:41:35.846414 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/7f7de8af-60f4-4571-bc47-95cb97ce0121-etc-swift\") pod \"swift-storage-0\" (UID: \"7f7de8af-60f4-4571-bc47-95cb97ce0121\") " pod="openstack/swift-storage-0" Dec 08 21:41:35 crc kubenswrapper[4791]: E1208 21:41:35.846599 4791 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 08 21:41:35 crc kubenswrapper[4791]: E1208 21:41:35.846764 4791 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 08 21:41:35 crc kubenswrapper[4791]: E1208 21:41:35.846832 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/7f7de8af-60f4-4571-bc47-95cb97ce0121-etc-swift podName:7f7de8af-60f4-4571-bc47-95cb97ce0121 nodeName:}" failed. No retries permitted until 2025-12-08 21:41:43.84681207 +0000 UTC m=+1380.545570415 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/7f7de8af-60f4-4571-bc47-95cb97ce0121-etc-swift") pod "swift-storage-0" (UID: "7f7de8af-60f4-4571-bc47-95cb97ce0121") : configmap "swift-ring-files" not found Dec 08 21:41:36 crc kubenswrapper[4791]: I1208 21:41:36.122346 4791 generic.go:334] "Generic (PLEG): container finished" podID="f1bb18f6-32f3-4146-aa7b-b423b5ac262a" containerID="4d0af36019015bee5fa6ff7e8ceba5150c5ec0dc86c0fd5ab4d2d928d18ce5e1" exitCode=0 Dec 08 21:41:36 crc kubenswrapper[4791]: I1208 21:41:36.122812 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-l5vmk" event={"ID":"f1bb18f6-32f3-4146-aa7b-b423b5ac262a","Type":"ContainerDied","Data":"4d0af36019015bee5fa6ff7e8ceba5150c5ec0dc86c0fd5ab4d2d928d18ce5e1"} Dec 08 21:41:37 crc kubenswrapper[4791]: I1208 21:41:37.212417 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Dec 08 21:41:37 crc kubenswrapper[4791]: I1208 21:41:37.311223 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Dec 08 21:41:39 crc kubenswrapper[4791]: I1208 21:41:39.078260 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7cb5889db5-l5vmk" Dec 08 21:41:39 crc kubenswrapper[4791]: I1208 21:41:39.129387 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gzp8c\" (UniqueName: \"kubernetes.io/projected/f1bb18f6-32f3-4146-aa7b-b423b5ac262a-kube-api-access-gzp8c\") pod \"f1bb18f6-32f3-4146-aa7b-b423b5ac262a\" (UID: \"f1bb18f6-32f3-4146-aa7b-b423b5ac262a\") " Dec 08 21:41:39 crc kubenswrapper[4791]: I1208 21:41:39.129614 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f1bb18f6-32f3-4146-aa7b-b423b5ac262a-dns-svc\") pod \"f1bb18f6-32f3-4146-aa7b-b423b5ac262a\" (UID: \"f1bb18f6-32f3-4146-aa7b-b423b5ac262a\") " Dec 08 21:41:39 crc kubenswrapper[4791]: I1208 21:41:39.129655 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f1bb18f6-32f3-4146-aa7b-b423b5ac262a-config\") pod \"f1bb18f6-32f3-4146-aa7b-b423b5ac262a\" (UID: \"f1bb18f6-32f3-4146-aa7b-b423b5ac262a\") " Dec 08 21:41:39 crc kubenswrapper[4791]: I1208 21:41:39.133125 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1bb18f6-32f3-4146-aa7b-b423b5ac262a-kube-api-access-gzp8c" (OuterVolumeSpecName: "kube-api-access-gzp8c") pod "f1bb18f6-32f3-4146-aa7b-b423b5ac262a" (UID: "f1bb18f6-32f3-4146-aa7b-b423b5ac262a"). InnerVolumeSpecName "kube-api-access-gzp8c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:41:39 crc kubenswrapper[4791]: I1208 21:41:39.151661 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-249fw" event={"ID":"5a5de61a-f218-4e36-afaf-2cab04468093","Type":"ContainerStarted","Data":"b49673eb5fcd6da7a31d0903b0ef603fb3c52b2d6b555e37ce75e071e7075df6"} Dec 08 21:41:39 crc kubenswrapper[4791]: I1208 21:41:39.157219 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"e2977e67-f037-4524-bfb5-0b04940113f7","Type":"ContainerStarted","Data":"60255065311f7c008d5b7e97ea20869c0571e7c6afa6ebe1c8d70f8c6d68be26"} Dec 08 21:41:39 crc kubenswrapper[4791]: I1208 21:41:39.159476 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d65f699f-q4qnx" event={"ID":"523f1b51-d3bc-419c-ac74-63559640838c","Type":"ContainerStarted","Data":"471980b8dc123b89b3c02acd73c1255390576c424b106f8070f8566bbc4f5924"} Dec 08 21:41:39 crc kubenswrapper[4791]: I1208 21:41:39.159685 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57d65f699f-q4qnx" Dec 08 21:41:39 crc kubenswrapper[4791]: I1208 21:41:39.161886 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7cb5889db5-l5vmk" Dec 08 21:41:39 crc kubenswrapper[4791]: I1208 21:41:39.161984 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-l5vmk" event={"ID":"f1bb18f6-32f3-4146-aa7b-b423b5ac262a","Type":"ContainerDied","Data":"f8b5b1480ba74601ed0c213032e47fbd3ca975bd09ac2c738cd8d691f60a0f34"} Dec 08 21:41:39 crc kubenswrapper[4791]: I1208 21:41:39.162041 4791 scope.go:117] "RemoveContainer" containerID="4d0af36019015bee5fa6ff7e8ceba5150c5ec0dc86c0fd5ab4d2d928d18ce5e1" Dec 08 21:41:39 crc kubenswrapper[4791]: I1208 21:41:39.167337 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-7cwhm" event={"ID":"4ad73e16-b73d-48d2-9968-934d17c0dea1","Type":"ContainerStarted","Data":"9235cecb8e67c24fa098fba18ee7fa46c517692f9caabd3e89d5ed8874928a35"} Dec 08 21:41:39 crc kubenswrapper[4791]: I1208 21:41:39.167611 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b8fbc5445-7cwhm" Dec 08 21:41:39 crc kubenswrapper[4791]: I1208 21:41:39.191017 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-249fw" podStartSLOduration=2.019792238 podStartE2EDuration="8.190992127s" podCreationTimestamp="2025-12-08 21:41:31 +0000 UTC" firstStartedPulling="2025-12-08 21:41:32.619778593 +0000 UTC m=+1369.318536938" lastFinishedPulling="2025-12-08 21:41:38.790978472 +0000 UTC m=+1375.489736827" observedRunningTime="2025-12-08 21:41:39.175303451 +0000 UTC m=+1375.874061796" watchObservedRunningTime="2025-12-08 21:41:39.190992127 +0000 UTC m=+1375.889750472" Dec 08 21:41:39 crc kubenswrapper[4791]: I1208 21:41:39.197397 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57d65f699f-q4qnx" podStartSLOduration=6.197377514 podStartE2EDuration="6.197377514s" podCreationTimestamp="2025-12-08 21:41:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:41:39.196504672 +0000 UTC m=+1375.895263017" watchObservedRunningTime="2025-12-08 21:41:39.197377514 +0000 UTC m=+1375.896135869" Dec 08 21:41:39 crc kubenswrapper[4791]: I1208 21:41:39.198991 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f1bb18f6-32f3-4146-aa7b-b423b5ac262a-config" (OuterVolumeSpecName: "config") pod "f1bb18f6-32f3-4146-aa7b-b423b5ac262a" (UID: "f1bb18f6-32f3-4146-aa7b-b423b5ac262a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:41:39 crc kubenswrapper[4791]: I1208 21:41:39.205070 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f1bb18f6-32f3-4146-aa7b-b423b5ac262a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f1bb18f6-32f3-4146-aa7b-b423b5ac262a" (UID: "f1bb18f6-32f3-4146-aa7b-b423b5ac262a"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:41:39 crc kubenswrapper[4791]: I1208 21:41:39.205191 4791 scope.go:117] "RemoveContainer" containerID="421e30883ecfcbf4c08c7b4af42d73085551b6ba36ef021bf495324fb46817ca" Dec 08 21:41:39 crc kubenswrapper[4791]: I1208 21:41:39.229198 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-b8fbc5445-7cwhm" podStartSLOduration=6.229179515 podStartE2EDuration="6.229179515s" podCreationTimestamp="2025-12-08 21:41:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:41:39.218921633 +0000 UTC m=+1375.917679978" watchObservedRunningTime="2025-12-08 21:41:39.229179515 +0000 UTC m=+1375.927937870" Dec 08 21:41:39 crc kubenswrapper[4791]: I1208 21:41:39.232413 4791 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f1bb18f6-32f3-4146-aa7b-b423b5ac262a-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:39 crc kubenswrapper[4791]: I1208 21:41:39.232437 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f1bb18f6-32f3-4146-aa7b-b423b5ac262a-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:39 crc kubenswrapper[4791]: I1208 21:41:39.232449 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gzp8c\" (UniqueName: \"kubernetes.io/projected/f1bb18f6-32f3-4146-aa7b-b423b5ac262a-kube-api-access-gzp8c\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:39 crc kubenswrapper[4791]: I1208 21:41:39.493116 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-l5vmk"] Dec 08 21:41:39 crc kubenswrapper[4791]: I1208 21:41:39.505665 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-l5vmk"] Dec 08 21:41:39 crc kubenswrapper[4791]: I1208 21:41:39.609809 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1bb18f6-32f3-4146-aa7b-b423b5ac262a" path="/var/lib/kubelet/pods/f1bb18f6-32f3-4146-aa7b-b423b5ac262a/volumes" Dec 08 21:41:40 crc kubenswrapper[4791]: I1208 21:41:40.146520 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-c9b8-account-create-update-wpqnb"] Dec 08 21:41:40 crc kubenswrapper[4791]: E1208 21:41:40.147005 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1bb18f6-32f3-4146-aa7b-b423b5ac262a" containerName="init" Dec 08 21:41:40 crc kubenswrapper[4791]: I1208 21:41:40.147018 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1bb18f6-32f3-4146-aa7b-b423b5ac262a" containerName="init" Dec 08 21:41:40 crc kubenswrapper[4791]: E1208 21:41:40.147026 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1bb18f6-32f3-4146-aa7b-b423b5ac262a" containerName="dnsmasq-dns" Dec 08 21:41:40 crc kubenswrapper[4791]: I1208 21:41:40.147031 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1bb18f6-32f3-4146-aa7b-b423b5ac262a" containerName="dnsmasq-dns" Dec 08 21:41:40 crc kubenswrapper[4791]: E1208 21:41:40.147044 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da534a4d-ccf5-4297-9a1e-5f3b2ad136f5" containerName="init" Dec 08 21:41:40 crc kubenswrapper[4791]: I1208 21:41:40.147051 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="da534a4d-ccf5-4297-9a1e-5f3b2ad136f5" containerName="init" Dec 08 21:41:40 crc kubenswrapper[4791]: I1208 21:41:40.147262 4791 
memory_manager.go:354] "RemoveStaleState removing state" podUID="f1bb18f6-32f3-4146-aa7b-b423b5ac262a" containerName="dnsmasq-dns" Dec 08 21:41:40 crc kubenswrapper[4791]: I1208 21:41:40.147302 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="da534a4d-ccf5-4297-9a1e-5f3b2ad136f5" containerName="init" Dec 08 21:41:40 crc kubenswrapper[4791]: I1208 21:41:40.148086 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-c9b8-account-create-update-wpqnb" Dec 08 21:41:40 crc kubenswrapper[4791]: I1208 21:41:40.151752 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Dec 08 21:41:40 crc kubenswrapper[4791]: I1208 21:41:40.157374 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-c9b8-account-create-update-wpqnb"] Dec 08 21:41:40 crc kubenswrapper[4791]: I1208 21:41:40.183657 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"e2977e67-f037-4524-bfb5-0b04940113f7","Type":"ContainerStarted","Data":"f792ab26b01c53ef77149678af0e93c2a3dbc2df5f60462d550da9862ac9ed56"} Dec 08 21:41:40 crc kubenswrapper[4791]: I1208 21:41:40.222018 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=3.023832973 podStartE2EDuration="7.221997624s" podCreationTimestamp="2025-12-08 21:41:33 +0000 UTC" firstStartedPulling="2025-12-08 21:41:34.575445904 +0000 UTC m=+1371.274204249" lastFinishedPulling="2025-12-08 21:41:38.773610555 +0000 UTC m=+1375.472368900" observedRunningTime="2025-12-08 21:41:40.214386707 +0000 UTC m=+1376.913145052" watchObservedRunningTime="2025-12-08 21:41:40.221997624 +0000 UTC m=+1376.920755969" Dec 08 21:41:40 crc kubenswrapper[4791]: I1208 21:41:40.241344 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-556rp"] Dec 08 21:41:40 crc kubenswrapper[4791]: I1208 21:41:40.243037 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-556rp" Dec 08 21:41:40 crc kubenswrapper[4791]: I1208 21:41:40.252671 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-556rp"] Dec 08 21:41:40 crc kubenswrapper[4791]: I1208 21:41:40.260907 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b8d6e5db-3948-4362-92ec-b2050a5686f0-operator-scripts\") pod \"glance-c9b8-account-create-update-wpqnb\" (UID: \"b8d6e5db-3948-4362-92ec-b2050a5686f0\") " pod="openstack/glance-c9b8-account-create-update-wpqnb" Dec 08 21:41:40 crc kubenswrapper[4791]: I1208 21:41:40.261070 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-895c6\" (UniqueName: \"kubernetes.io/projected/b8d6e5db-3948-4362-92ec-b2050a5686f0-kube-api-access-895c6\") pod \"glance-c9b8-account-create-update-wpqnb\" (UID: \"b8d6e5db-3948-4362-92ec-b2050a5686f0\") " pod="openstack/glance-c9b8-account-create-update-wpqnb" Dec 08 21:41:40 crc kubenswrapper[4791]: I1208 21:41:40.363426 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6jmh\" (UniqueName: \"kubernetes.io/projected/9f9e04e6-b15a-48ff-8836-e325232fdf81-kube-api-access-m6jmh\") pod \"glance-db-create-556rp\" (UID: \"9f9e04e6-b15a-48ff-8836-e325232fdf81\") " pod="openstack/glance-db-create-556rp" Dec 08 21:41:40 crc kubenswrapper[4791]: I1208 21:41:40.363547 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b8d6e5db-3948-4362-92ec-b2050a5686f0-operator-scripts\") pod \"glance-c9b8-account-create-update-wpqnb\" (UID: \"b8d6e5db-3948-4362-92ec-b2050a5686f0\") " pod="openstack/glance-c9b8-account-create-update-wpqnb" Dec 08 21:41:40 crc kubenswrapper[4791]: I1208 21:41:40.363636 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-895c6\" (UniqueName: \"kubernetes.io/projected/b8d6e5db-3948-4362-92ec-b2050a5686f0-kube-api-access-895c6\") pod \"glance-c9b8-account-create-update-wpqnb\" (UID: \"b8d6e5db-3948-4362-92ec-b2050a5686f0\") " pod="openstack/glance-c9b8-account-create-update-wpqnb" Dec 08 21:41:40 crc kubenswrapper[4791]: I1208 21:41:40.363693 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9f9e04e6-b15a-48ff-8836-e325232fdf81-operator-scripts\") pod \"glance-db-create-556rp\" (UID: \"9f9e04e6-b15a-48ff-8836-e325232fdf81\") " pod="openstack/glance-db-create-556rp" Dec 08 21:41:40 crc kubenswrapper[4791]: I1208 21:41:40.364638 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b8d6e5db-3948-4362-92ec-b2050a5686f0-operator-scripts\") pod \"glance-c9b8-account-create-update-wpqnb\" (UID: \"b8d6e5db-3948-4362-92ec-b2050a5686f0\") " pod="openstack/glance-c9b8-account-create-update-wpqnb" Dec 08 21:41:40 crc kubenswrapper[4791]: I1208 21:41:40.391482 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-895c6\" (UniqueName: \"kubernetes.io/projected/b8d6e5db-3948-4362-92ec-b2050a5686f0-kube-api-access-895c6\") pod \"glance-c9b8-account-create-update-wpqnb\" (UID: \"b8d6e5db-3948-4362-92ec-b2050a5686f0\") " 
pod="openstack/glance-c9b8-account-create-update-wpqnb" Dec 08 21:41:40 crc kubenswrapper[4791]: I1208 21:41:40.465783 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6jmh\" (UniqueName: \"kubernetes.io/projected/9f9e04e6-b15a-48ff-8836-e325232fdf81-kube-api-access-m6jmh\") pod \"glance-db-create-556rp\" (UID: \"9f9e04e6-b15a-48ff-8836-e325232fdf81\") " pod="openstack/glance-db-create-556rp" Dec 08 21:41:40 crc kubenswrapper[4791]: I1208 21:41:40.466244 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9f9e04e6-b15a-48ff-8836-e325232fdf81-operator-scripts\") pod \"glance-db-create-556rp\" (UID: \"9f9e04e6-b15a-48ff-8836-e325232fdf81\") " pod="openstack/glance-db-create-556rp" Dec 08 21:41:40 crc kubenswrapper[4791]: I1208 21:41:40.467070 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9f9e04e6-b15a-48ff-8836-e325232fdf81-operator-scripts\") pod \"glance-db-create-556rp\" (UID: \"9f9e04e6-b15a-48ff-8836-e325232fdf81\") " pod="openstack/glance-db-create-556rp" Dec 08 21:41:40 crc kubenswrapper[4791]: I1208 21:41:40.489338 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m6jmh\" (UniqueName: \"kubernetes.io/projected/9f9e04e6-b15a-48ff-8836-e325232fdf81-kube-api-access-m6jmh\") pod \"glance-db-create-556rp\" (UID: \"9f9e04e6-b15a-48ff-8836-e325232fdf81\") " pod="openstack/glance-db-create-556rp" Dec 08 21:41:40 crc kubenswrapper[4791]: I1208 21:41:40.489810 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-c9b8-account-create-update-wpqnb" Dec 08 21:41:40 crc kubenswrapper[4791]: I1208 21:41:40.564126 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-556rp" Dec 08 21:41:40 crc kubenswrapper[4791]: I1208 21:41:40.980750 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-c9b8-account-create-update-wpqnb"] Dec 08 21:41:41 crc kubenswrapper[4791]: I1208 21:41:41.078556 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-556rp"] Dec 08 21:41:41 crc kubenswrapper[4791]: W1208 21:41:41.088748 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9f9e04e6_b15a_48ff_8836_e325232fdf81.slice/crio-5e75e1334f0cbce7c6fd65f7a8406a287ef738989e8df0ed18daba7389c0d222 WatchSource:0}: Error finding container 5e75e1334f0cbce7c6fd65f7a8406a287ef738989e8df0ed18daba7389c0d222: Status 404 returned error can't find the container with id 5e75e1334f0cbce7c6fd65f7a8406a287ef738989e8df0ed18daba7389c0d222 Dec 08 21:41:41 crc kubenswrapper[4791]: I1208 21:41:41.196245 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-c9b8-account-create-update-wpqnb" event={"ID":"b8d6e5db-3948-4362-92ec-b2050a5686f0","Type":"ContainerStarted","Data":"0b1291da6ad3257b421921446aa81c19d0abffd759198676b8ccd35f80691695"} Dec 08 21:41:41 crc kubenswrapper[4791]: I1208 21:41:41.197580 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-556rp" event={"ID":"9f9e04e6-b15a-48ff-8836-e325232fdf81","Type":"ContainerStarted","Data":"5e75e1334f0cbce7c6fd65f7a8406a287ef738989e8df0ed18daba7389c0d222"} Dec 08 21:41:41 crc kubenswrapper[4791]: I1208 21:41:41.198995 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Dec 08 21:41:41 crc kubenswrapper[4791]: I1208 21:41:41.941808 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-trs25" podUID="95730417-3c85-44a0-9a95-8fefeb495a03" containerName="registry-server" probeResult="failure" output=< Dec 08 21:41:41 crc kubenswrapper[4791]: timeout: failed to connect service ":50051" within 1s Dec 08 21:41:41 crc kubenswrapper[4791]: > Dec 08 21:41:42 crc kubenswrapper[4791]: I1208 21:41:42.208682 4791 generic.go:334] "Generic (PLEG): container finished" podID="b8d6e5db-3948-4362-92ec-b2050a5686f0" containerID="0fa51e101c8817a211840b54d2ccf86e077dcf3fc415d8850fa8f1fcd25615f1" exitCode=0 Dec 08 21:41:42 crc kubenswrapper[4791]: I1208 21:41:42.208742 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-c9b8-account-create-update-wpqnb" event={"ID":"b8d6e5db-3948-4362-92ec-b2050a5686f0","Type":"ContainerDied","Data":"0fa51e101c8817a211840b54d2ccf86e077dcf3fc415d8850fa8f1fcd25615f1"} Dec 08 21:41:42 crc kubenswrapper[4791]: I1208 21:41:42.210874 4791 generic.go:334] "Generic (PLEG): container finished" podID="9f9e04e6-b15a-48ff-8836-e325232fdf81" containerID="65065100fdcc2fa117536011ecc1157c1474025a10305b01ac141765c359b34a" exitCode=0 Dec 08 21:41:42 crc kubenswrapper[4791]: I1208 21:41:42.210985 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-556rp" event={"ID":"9f9e04e6-b15a-48ff-8836-e325232fdf81","Type":"ContainerDied","Data":"65065100fdcc2fa117536011ecc1157c1474025a10305b01ac141765c359b34a"} Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:43.749933 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-c9b8-account-create-update-wpqnb" Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:43.872470 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-895c6\" (UniqueName: \"kubernetes.io/projected/b8d6e5db-3948-4362-92ec-b2050a5686f0-kube-api-access-895c6\") pod \"b8d6e5db-3948-4362-92ec-b2050a5686f0\" (UID: \"b8d6e5db-3948-4362-92ec-b2050a5686f0\") " Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:43.872588 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b8d6e5db-3948-4362-92ec-b2050a5686f0-operator-scripts\") pod \"b8d6e5db-3948-4362-92ec-b2050a5686f0\" (UID: \"b8d6e5db-3948-4362-92ec-b2050a5686f0\") " Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:43.873119 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/7f7de8af-60f4-4571-bc47-95cb97ce0121-etc-swift\") pod \"swift-storage-0\" (UID: \"7f7de8af-60f4-4571-bc47-95cb97ce0121\") " pod="openstack/swift-storage-0" Dec 08 21:41:44 crc kubenswrapper[4791]: E1208 21:41:43.873336 4791 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 08 21:41:44 crc kubenswrapper[4791]: E1208 21:41:43.873349 4791 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 08 21:41:44 crc kubenswrapper[4791]: E1208 21:41:43.873389 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/7f7de8af-60f4-4571-bc47-95cb97ce0121-etc-swift podName:7f7de8af-60f4-4571-bc47-95cb97ce0121 nodeName:}" failed. No retries permitted until 2025-12-08 21:41:59.873375884 +0000 UTC m=+1396.572134229 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/7f7de8af-60f4-4571-bc47-95cb97ce0121-etc-swift") pod "swift-storage-0" (UID: "7f7de8af-60f4-4571-bc47-95cb97ce0121") : configmap "swift-ring-files" not found Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:43.874918 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b8d6e5db-3948-4362-92ec-b2050a5686f0-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b8d6e5db-3948-4362-92ec-b2050a5686f0" (UID: "b8d6e5db-3948-4362-92ec-b2050a5686f0"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:43.899165 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8d6e5db-3948-4362-92ec-b2050a5686f0-kube-api-access-895c6" (OuterVolumeSpecName: "kube-api-access-895c6") pod "b8d6e5db-3948-4362-92ec-b2050a5686f0" (UID: "b8d6e5db-3948-4362-92ec-b2050a5686f0"). InnerVolumeSpecName "kube-api-access-895c6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:43.975954 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-895c6\" (UniqueName: \"kubernetes.io/projected/b8d6e5db-3948-4362-92ec-b2050a5686f0-kube-api-access-895c6\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:43.976296 4791 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b8d6e5db-3948-4362-92ec-b2050a5686f0-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:44.235045 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-556rp" event={"ID":"9f9e04e6-b15a-48ff-8836-e325232fdf81","Type":"ContainerDied","Data":"5e75e1334f0cbce7c6fd65f7a8406a287ef738989e8df0ed18daba7389c0d222"} Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:44.235097 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5e75e1334f0cbce7c6fd65f7a8406a287ef738989e8df0ed18daba7389c0d222" Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:44.237634 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-c9b8-account-create-update-wpqnb" event={"ID":"b8d6e5db-3948-4362-92ec-b2050a5686f0","Type":"ContainerDied","Data":"0b1291da6ad3257b421921446aa81c19d0abffd759198676b8ccd35f80691695"} Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:44.237679 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0b1291da6ad3257b421921446aa81c19d0abffd759198676b8ccd35f80691695" Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:44.237775 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-c9b8-account-create-update-wpqnb" Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:44.311134 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-556rp" Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:44.385141 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m6jmh\" (UniqueName: \"kubernetes.io/projected/9f9e04e6-b15a-48ff-8836-e325232fdf81-kube-api-access-m6jmh\") pod \"9f9e04e6-b15a-48ff-8836-e325232fdf81\" (UID: \"9f9e04e6-b15a-48ff-8836-e325232fdf81\") " Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:44.385277 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9f9e04e6-b15a-48ff-8836-e325232fdf81-operator-scripts\") pod \"9f9e04e6-b15a-48ff-8836-e325232fdf81\" (UID: \"9f9e04e6-b15a-48ff-8836-e325232fdf81\") " Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:44.386316 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f9e04e6-b15a-48ff-8836-e325232fdf81-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9f9e04e6-b15a-48ff-8836-e325232fdf81" (UID: "9f9e04e6-b15a-48ff-8836-e325232fdf81"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:44.391420 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f9e04e6-b15a-48ff-8836-e325232fdf81-kube-api-access-m6jmh" (OuterVolumeSpecName: "kube-api-access-m6jmh") pod "9f9e04e6-b15a-48ff-8836-e325232fdf81" (UID: "9f9e04e6-b15a-48ff-8836-e325232fdf81"). InnerVolumeSpecName "kube-api-access-m6jmh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:44.468204 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-2gfz5"] Dec 08 21:41:44 crc kubenswrapper[4791]: E1208 21:41:44.481480 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8d6e5db-3948-4362-92ec-b2050a5686f0" containerName="mariadb-account-create-update" Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:44.481526 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8d6e5db-3948-4362-92ec-b2050a5686f0" containerName="mariadb-account-create-update" Dec 08 21:41:44 crc kubenswrapper[4791]: E1208 21:41:44.481584 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f9e04e6-b15a-48ff-8836-e325232fdf81" containerName="mariadb-database-create" Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:44.481591 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f9e04e6-b15a-48ff-8836-e325232fdf81" containerName="mariadb-database-create" Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:44.482107 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8d6e5db-3948-4362-92ec-b2050a5686f0" containerName="mariadb-account-create-update" Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:44.482138 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f9e04e6-b15a-48ff-8836-e325232fdf81" containerName="mariadb-database-create" Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:44.483151 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-2gfz5" Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:44.488779 4791 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9f9e04e6-b15a-48ff-8836-e325232fdf81-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:44.488820 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m6jmh\" (UniqueName: \"kubernetes.io/projected/9f9e04e6-b15a-48ff-8836-e325232fdf81-kube-api-access-m6jmh\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:44.508740 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-2gfz5"] Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:44.591497 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-67kmq\" (UniqueName: \"kubernetes.io/projected/0e800e49-dab4-40d0-a626-0e6f7a62ed50-kube-api-access-67kmq\") pod \"keystone-db-create-2gfz5\" (UID: \"0e800e49-dab4-40d0-a626-0e6f7a62ed50\") " pod="openstack/keystone-db-create-2gfz5" Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:44.591589 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0e800e49-dab4-40d0-a626-0e6f7a62ed50-operator-scripts\") pod \"keystone-db-create-2gfz5\" (UID: \"0e800e49-dab4-40d0-a626-0e6f7a62ed50\") " pod="openstack/keystone-db-create-2gfz5" Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:44.634406 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-f129-account-create-update-6597r"] Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:44.636353 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-f129-account-create-update-6597r" Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:44.642186 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:44.670920 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-f129-account-create-update-6597r"] Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:44.696813 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-67kmq\" (UniqueName: \"kubernetes.io/projected/0e800e49-dab4-40d0-a626-0e6f7a62ed50-kube-api-access-67kmq\") pod \"keystone-db-create-2gfz5\" (UID: \"0e800e49-dab4-40d0-a626-0e6f7a62ed50\") " pod="openstack/keystone-db-create-2gfz5" Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:44.696990 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0e800e49-dab4-40d0-a626-0e6f7a62ed50-operator-scripts\") pod \"keystone-db-create-2gfz5\" (UID: \"0e800e49-dab4-40d0-a626-0e6f7a62ed50\") " pod="openstack/keystone-db-create-2gfz5" Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:44.697209 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/51d67e99-957e-4645-9a0b-243cdc7e8369-operator-scripts\") pod \"keystone-f129-account-create-update-6597r\" (UID: \"51d67e99-957e-4645-9a0b-243cdc7e8369\") " pod="openstack/keystone-f129-account-create-update-6597r" Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:44.697291 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-96mht\" (UniqueName: \"kubernetes.io/projected/51d67e99-957e-4645-9a0b-243cdc7e8369-kube-api-access-96mht\") pod \"keystone-f129-account-create-update-6597r\" (UID: \"51d67e99-957e-4645-9a0b-243cdc7e8369\") " pod="openstack/keystone-f129-account-create-update-6597r" Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:44.698924 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0e800e49-dab4-40d0-a626-0e6f7a62ed50-operator-scripts\") pod \"keystone-db-create-2gfz5\" (UID: \"0e800e49-dab4-40d0-a626-0e6f7a62ed50\") " pod="openstack/keystone-db-create-2gfz5" Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:44.723083 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-67kmq\" (UniqueName: \"kubernetes.io/projected/0e800e49-dab4-40d0-a626-0e6f7a62ed50-kube-api-access-67kmq\") pod \"keystone-db-create-2gfz5\" (UID: \"0e800e49-dab4-40d0-a626-0e6f7a62ed50\") " pod="openstack/keystone-db-create-2gfz5" Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:44.779759 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-x6ghn"] Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:44.781590 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-x6ghn" Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:44.799962 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/51d67e99-957e-4645-9a0b-243cdc7e8369-operator-scripts\") pod \"keystone-f129-account-create-update-6597r\" (UID: \"51d67e99-957e-4645-9a0b-243cdc7e8369\") " pod="openstack/keystone-f129-account-create-update-6597r" Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:44.800792 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-96mht\" (UniqueName: \"kubernetes.io/projected/51d67e99-957e-4645-9a0b-243cdc7e8369-kube-api-access-96mht\") pod \"keystone-f129-account-create-update-6597r\" (UID: \"51d67e99-957e-4645-9a0b-243cdc7e8369\") " pod="openstack/keystone-f129-account-create-update-6597r" Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:44.802842 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-x6ghn"] Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:44.807053 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/51d67e99-957e-4645-9a0b-243cdc7e8369-operator-scripts\") pod \"keystone-f129-account-create-update-6597r\" (UID: \"51d67e99-957e-4645-9a0b-243cdc7e8369\") " pod="openstack/keystone-f129-account-create-update-6597r" Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:44.816156 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-2gfz5" Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:44.826355 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-96mht\" (UniqueName: \"kubernetes.io/projected/51d67e99-957e-4645-9a0b-243cdc7e8369-kube-api-access-96mht\") pod \"keystone-f129-account-create-update-6597r\" (UID: \"51d67e99-957e-4645-9a0b-243cdc7e8369\") " pod="openstack/keystone-f129-account-create-update-6597r" Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:44.885778 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-cd7e-account-create-update-92q4x"] Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:44.887167 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-cd7e-account-create-update-92q4x" Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:44.890482 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:44.906001 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b6c6t\" (UniqueName: \"kubernetes.io/projected/291278e1-f793-4fe7-bf0e-63279ac0ba7d-kube-api-access-b6c6t\") pod \"placement-db-create-x6ghn\" (UID: \"291278e1-f793-4fe7-bf0e-63279ac0ba7d\") " pod="openstack/placement-db-create-x6ghn" Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:44.906245 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/291278e1-f793-4fe7-bf0e-63279ac0ba7d-operator-scripts\") pod \"placement-db-create-x6ghn\" (UID: \"291278e1-f793-4fe7-bf0e-63279ac0ba7d\") " pod="openstack/placement-db-create-x6ghn" Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:44.906916 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-cd7e-account-create-update-92q4x"] Dec 08 21:41:44 crc kubenswrapper[4791]: I1208 21:41:44.978865 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-f129-account-create-update-6597r" Dec 08 21:41:45 crc kubenswrapper[4791]: I1208 21:41:45.016909 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tlmb8\" (UniqueName: \"kubernetes.io/projected/492c6838-316b-4f0a-8115-3ba1b4b05ce2-kube-api-access-tlmb8\") pod \"placement-cd7e-account-create-update-92q4x\" (UID: \"492c6838-316b-4f0a-8115-3ba1b4b05ce2\") " pod="openstack/placement-cd7e-account-create-update-92q4x" Dec 08 21:41:45 crc kubenswrapper[4791]: I1208 21:41:45.016991 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/291278e1-f793-4fe7-bf0e-63279ac0ba7d-operator-scripts\") pod \"placement-db-create-x6ghn\" (UID: \"291278e1-f793-4fe7-bf0e-63279ac0ba7d\") " pod="openstack/placement-db-create-x6ghn" Dec 08 21:41:45 crc kubenswrapper[4791]: I1208 21:41:45.017021 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/492c6838-316b-4f0a-8115-3ba1b4b05ce2-operator-scripts\") pod \"placement-cd7e-account-create-update-92q4x\" (UID: \"492c6838-316b-4f0a-8115-3ba1b4b05ce2\") " pod="openstack/placement-cd7e-account-create-update-92q4x" Dec 08 21:41:45 crc kubenswrapper[4791]: I1208 21:41:45.017104 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b6c6t\" (UniqueName: \"kubernetes.io/projected/291278e1-f793-4fe7-bf0e-63279ac0ba7d-kube-api-access-b6c6t\") pod \"placement-db-create-x6ghn\" (UID: \"291278e1-f793-4fe7-bf0e-63279ac0ba7d\") " pod="openstack/placement-db-create-x6ghn" Dec 08 21:41:45 crc kubenswrapper[4791]: I1208 21:41:45.018589 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/291278e1-f793-4fe7-bf0e-63279ac0ba7d-operator-scripts\") pod \"placement-db-create-x6ghn\" (UID: \"291278e1-f793-4fe7-bf0e-63279ac0ba7d\") " pod="openstack/placement-db-create-x6ghn" Dec 08 21:41:45 crc kubenswrapper[4791]: I1208 
21:41:45.039478 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b6c6t\" (UniqueName: \"kubernetes.io/projected/291278e1-f793-4fe7-bf0e-63279ac0ba7d-kube-api-access-b6c6t\") pod \"placement-db-create-x6ghn\" (UID: \"291278e1-f793-4fe7-bf0e-63279ac0ba7d\") " pod="openstack/placement-db-create-x6ghn" Dec 08 21:41:45 crc kubenswrapper[4791]: I1208 21:41:45.115955 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-x6ghn" Dec 08 21:41:45 crc kubenswrapper[4791]: I1208 21:41:45.119484 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/492c6838-316b-4f0a-8115-3ba1b4b05ce2-operator-scripts\") pod \"placement-cd7e-account-create-update-92q4x\" (UID: \"492c6838-316b-4f0a-8115-3ba1b4b05ce2\") " pod="openstack/placement-cd7e-account-create-update-92q4x" Dec 08 21:41:45 crc kubenswrapper[4791]: I1208 21:41:45.119694 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tlmb8\" (UniqueName: \"kubernetes.io/projected/492c6838-316b-4f0a-8115-3ba1b4b05ce2-kube-api-access-tlmb8\") pod \"placement-cd7e-account-create-update-92q4x\" (UID: \"492c6838-316b-4f0a-8115-3ba1b4b05ce2\") " pod="openstack/placement-cd7e-account-create-update-92q4x" Dec 08 21:41:45 crc kubenswrapper[4791]: I1208 21:41:45.120184 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/492c6838-316b-4f0a-8115-3ba1b4b05ce2-operator-scripts\") pod \"placement-cd7e-account-create-update-92q4x\" (UID: \"492c6838-316b-4f0a-8115-3ba1b4b05ce2\") " pod="openstack/placement-cd7e-account-create-update-92q4x" Dec 08 21:41:45 crc kubenswrapper[4791]: I1208 21:41:45.142777 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tlmb8\" (UniqueName: \"kubernetes.io/projected/492c6838-316b-4f0a-8115-3ba1b4b05ce2-kube-api-access-tlmb8\") pod \"placement-cd7e-account-create-update-92q4x\" (UID: \"492c6838-316b-4f0a-8115-3ba1b4b05ce2\") " pod="openstack/placement-cd7e-account-create-update-92q4x" Dec 08 21:41:45 crc kubenswrapper[4791]: I1208 21:41:45.239408 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-cd7e-account-create-update-92q4x" Dec 08 21:41:45 crc kubenswrapper[4791]: I1208 21:41:45.248775 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-556rp" Dec 08 21:41:45 crc kubenswrapper[4791]: I1208 21:41:45.366618 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-2gfz5"] Dec 08 21:41:45 crc kubenswrapper[4791]: I1208 21:41:45.527197 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-f129-account-create-update-6597r"] Dec 08 21:41:45 crc kubenswrapper[4791]: W1208 21:41:45.534842 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod51d67e99_957e_4645_9a0b_243cdc7e8369.slice/crio-e8ab2e98979b9131a3e6d62a00637db5ce00ef1c8d253a605bbb4a333650d45f WatchSource:0}: Error finding container e8ab2e98979b9131a3e6d62a00637db5ce00ef1c8d253a605bbb4a333650d45f: Status 404 returned error can't find the container with id e8ab2e98979b9131a3e6d62a00637db5ce00ef1c8d253a605bbb4a333650d45f Dec 08 21:41:45 crc kubenswrapper[4791]: I1208 21:41:45.690682 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-x6ghn"] Dec 08 21:41:45 crc kubenswrapper[4791]: W1208 21:41:45.699062 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod291278e1_f793_4fe7_bf0e_63279ac0ba7d.slice/crio-0e4f6a66b14914db48e69f4eeae4bdbee246bbcb76ae9582613810945fb98a58 WatchSource:0}: Error finding container 0e4f6a66b14914db48e69f4eeae4bdbee246bbcb76ae9582613810945fb98a58: Status 404 returned error can't find the container with id 0e4f6a66b14914db48e69f4eeae4bdbee246bbcb76ae9582613810945fb98a58 Dec 08 21:41:45 crc kubenswrapper[4791]: I1208 21:41:45.808081 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-cd7e-account-create-update-92q4x"] Dec 08 21:41:45 crc kubenswrapper[4791]: W1208 21:41:45.871166 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod492c6838_316b_4f0a_8115_3ba1b4b05ce2.slice/crio-6f04ecca01f1762c6bb7eeadd4cf1d1b09330c0c9299671468c79ed7ebd73a2d WatchSource:0}: Error finding container 6f04ecca01f1762c6bb7eeadd4cf1d1b09330c0c9299671468c79ed7ebd73a2d: Status 404 returned error can't find the container with id 6f04ecca01f1762c6bb7eeadd4cf1d1b09330c0c9299671468c79ed7ebd73a2d Dec 08 21:41:46 crc kubenswrapper[4791]: I1208 21:41:46.262034 4791 generic.go:334] "Generic (PLEG): container finished" podID="51d67e99-957e-4645-9a0b-243cdc7e8369" containerID="41152df6293e432eda2c80f16af4bb9256bcfba8151cd03681546fd8c1f63b09" exitCode=0 Dec 08 21:41:46 crc kubenswrapper[4791]: I1208 21:41:46.262168 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-f129-account-create-update-6597r" event={"ID":"51d67e99-957e-4645-9a0b-243cdc7e8369","Type":"ContainerDied","Data":"41152df6293e432eda2c80f16af4bb9256bcfba8151cd03681546fd8c1f63b09"} Dec 08 21:41:46 crc kubenswrapper[4791]: I1208 21:41:46.262545 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-f129-account-create-update-6597r" event={"ID":"51d67e99-957e-4645-9a0b-243cdc7e8369","Type":"ContainerStarted","Data":"e8ab2e98979b9131a3e6d62a00637db5ce00ef1c8d253a605bbb4a333650d45f"} Dec 08 21:41:46 crc kubenswrapper[4791]: I1208 21:41:46.266257 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-cd7e-account-create-update-92q4x" 
event={"ID":"492c6838-316b-4f0a-8115-3ba1b4b05ce2","Type":"ContainerStarted","Data":"02bf6659e9bf295b0760e012f5c47ed20f72cfb4a8fb0bf7369f5435dce676c5"} Dec 08 21:41:46 crc kubenswrapper[4791]: I1208 21:41:46.266304 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-cd7e-account-create-update-92q4x" event={"ID":"492c6838-316b-4f0a-8115-3ba1b4b05ce2","Type":"ContainerStarted","Data":"6f04ecca01f1762c6bb7eeadd4cf1d1b09330c0c9299671468c79ed7ebd73a2d"} Dec 08 21:41:46 crc kubenswrapper[4791]: I1208 21:41:46.268237 4791 generic.go:334] "Generic (PLEG): container finished" podID="0e800e49-dab4-40d0-a626-0e6f7a62ed50" containerID="83dde5224ff99f69fb62e6036f3a47833aef4697d449be86110c372cc3ef69a6" exitCode=0 Dec 08 21:41:46 crc kubenswrapper[4791]: I1208 21:41:46.268301 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-2gfz5" event={"ID":"0e800e49-dab4-40d0-a626-0e6f7a62ed50","Type":"ContainerDied","Data":"83dde5224ff99f69fb62e6036f3a47833aef4697d449be86110c372cc3ef69a6"} Dec 08 21:41:46 crc kubenswrapper[4791]: I1208 21:41:46.268323 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-2gfz5" event={"ID":"0e800e49-dab4-40d0-a626-0e6f7a62ed50","Type":"ContainerStarted","Data":"7704602afc0414854f2555b9ca2625e3983d7dcbfebe37611a383e895ed93455"} Dec 08 21:41:46 crc kubenswrapper[4791]: I1208 21:41:46.270323 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-x6ghn" event={"ID":"291278e1-f793-4fe7-bf0e-63279ac0ba7d","Type":"ContainerStarted","Data":"c1674c2cc7dee6194b90bb7afa8bd4d796d4f1a615273870c9db4db8ef7f2ba1"} Dec 08 21:41:46 crc kubenswrapper[4791]: I1208 21:41:46.270380 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-x6ghn" event={"ID":"291278e1-f793-4fe7-bf0e-63279ac0ba7d","Type":"ContainerStarted","Data":"0e4f6a66b14914db48e69f4eeae4bdbee246bbcb76ae9582613810945fb98a58"} Dec 08 21:41:46 crc kubenswrapper[4791]: I1208 21:41:46.289547 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-create-x6ghn" podStartSLOduration=2.289527355 podStartE2EDuration="2.289527355s" podCreationTimestamp="2025-12-08 21:41:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:41:46.287471584 +0000 UTC m=+1382.986229929" watchObservedRunningTime="2025-12-08 21:41:46.289527355 +0000 UTC m=+1382.988285700" Dec 08 21:41:46 crc kubenswrapper[4791]: I1208 21:41:46.332385 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-cd7e-account-create-update-92q4x" podStartSLOduration=2.332361198 podStartE2EDuration="2.332361198s" podCreationTimestamp="2025-12-08 21:41:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:41:46.319989164 +0000 UTC m=+1383.018747509" watchObservedRunningTime="2025-12-08 21:41:46.332361198 +0000 UTC m=+1383.031119543" Dec 08 21:41:47 crc kubenswrapper[4791]: I1208 21:41:47.280514 4791 generic.go:334] "Generic (PLEG): container finished" podID="291278e1-f793-4fe7-bf0e-63279ac0ba7d" containerID="c1674c2cc7dee6194b90bb7afa8bd4d796d4f1a615273870c9db4db8ef7f2ba1" exitCode=0 Dec 08 21:41:47 crc kubenswrapper[4791]: I1208 21:41:47.280633 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-x6ghn" 
event={"ID":"291278e1-f793-4fe7-bf0e-63279ac0ba7d","Type":"ContainerDied","Data":"c1674c2cc7dee6194b90bb7afa8bd4d796d4f1a615273870c9db4db8ef7f2ba1"} Dec 08 21:41:47 crc kubenswrapper[4791]: I1208 21:41:47.284505 4791 generic.go:334] "Generic (PLEG): container finished" podID="492c6838-316b-4f0a-8115-3ba1b4b05ce2" containerID="02bf6659e9bf295b0760e012f5c47ed20f72cfb4a8fb0bf7369f5435dce676c5" exitCode=0 Dec 08 21:41:47 crc kubenswrapper[4791]: I1208 21:41:47.284631 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-cd7e-account-create-update-92q4x" event={"ID":"492c6838-316b-4f0a-8115-3ba1b4b05ce2","Type":"ContainerDied","Data":"02bf6659e9bf295b0760e012f5c47ed20f72cfb4a8fb0bf7369f5435dce676c5"} Dec 08 21:41:47 crc kubenswrapper[4791]: I1208 21:41:47.285774 4791 generic.go:334] "Generic (PLEG): container finished" podID="5a5de61a-f218-4e36-afaf-2cab04468093" containerID="b49673eb5fcd6da7a31d0903b0ef603fb3c52b2d6b555e37ce75e071e7075df6" exitCode=0 Dec 08 21:41:47 crc kubenswrapper[4791]: I1208 21:41:47.285809 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-249fw" event={"ID":"5a5de61a-f218-4e36-afaf-2cab04468093","Type":"ContainerDied","Data":"b49673eb5fcd6da7a31d0903b0ef603fb3c52b2d6b555e37ce75e071e7075df6"} Dec 08 21:41:47 crc kubenswrapper[4791]: I1208 21:41:47.769079 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-2gfz5" Dec 08 21:41:47 crc kubenswrapper[4791]: I1208 21:41:47.865574 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-f129-account-create-update-6597r" Dec 08 21:41:47 crc kubenswrapper[4791]: I1208 21:41:47.888270 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0e800e49-dab4-40d0-a626-0e6f7a62ed50-operator-scripts\") pod \"0e800e49-dab4-40d0-a626-0e6f7a62ed50\" (UID: \"0e800e49-dab4-40d0-a626-0e6f7a62ed50\") " Dec 08 21:41:47 crc kubenswrapper[4791]: I1208 21:41:47.888635 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-67kmq\" (UniqueName: \"kubernetes.io/projected/0e800e49-dab4-40d0-a626-0e6f7a62ed50-kube-api-access-67kmq\") pod \"0e800e49-dab4-40d0-a626-0e6f7a62ed50\" (UID: \"0e800e49-dab4-40d0-a626-0e6f7a62ed50\") " Dec 08 21:41:47 crc kubenswrapper[4791]: I1208 21:41:47.889033 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0e800e49-dab4-40d0-a626-0e6f7a62ed50-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0e800e49-dab4-40d0-a626-0e6f7a62ed50" (UID: "0e800e49-dab4-40d0-a626-0e6f7a62ed50"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:41:47 crc kubenswrapper[4791]: I1208 21:41:47.889413 4791 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0e800e49-dab4-40d0-a626-0e6f7a62ed50-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:47 crc kubenswrapper[4791]: I1208 21:41:47.897683 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e800e49-dab4-40d0-a626-0e6f7a62ed50-kube-api-access-67kmq" (OuterVolumeSpecName: "kube-api-access-67kmq") pod "0e800e49-dab4-40d0-a626-0e6f7a62ed50" (UID: "0e800e49-dab4-40d0-a626-0e6f7a62ed50"). InnerVolumeSpecName "kube-api-access-67kmq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:41:47 crc kubenswrapper[4791]: I1208 21:41:47.991032 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/51d67e99-957e-4645-9a0b-243cdc7e8369-operator-scripts\") pod \"51d67e99-957e-4645-9a0b-243cdc7e8369\" (UID: \"51d67e99-957e-4645-9a0b-243cdc7e8369\") " Dec 08 21:41:47 crc kubenswrapper[4791]: I1208 21:41:47.991143 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-96mht\" (UniqueName: \"kubernetes.io/projected/51d67e99-957e-4645-9a0b-243cdc7e8369-kube-api-access-96mht\") pod \"51d67e99-957e-4645-9a0b-243cdc7e8369\" (UID: \"51d67e99-957e-4645-9a0b-243cdc7e8369\") " Dec 08 21:41:47 crc kubenswrapper[4791]: I1208 21:41:47.991648 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/51d67e99-957e-4645-9a0b-243cdc7e8369-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "51d67e99-957e-4645-9a0b-243cdc7e8369" (UID: "51d67e99-957e-4645-9a0b-243cdc7e8369"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:41:47 crc kubenswrapper[4791]: I1208 21:41:47.991820 4791 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/51d67e99-957e-4645-9a0b-243cdc7e8369-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:47 crc kubenswrapper[4791]: I1208 21:41:47.991896 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-67kmq\" (UniqueName: \"kubernetes.io/projected/0e800e49-dab4-40d0-a626-0e6f7a62ed50-kube-api-access-67kmq\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:47 crc kubenswrapper[4791]: I1208 21:41:47.995515 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51d67e99-957e-4645-9a0b-243cdc7e8369-kube-api-access-96mht" (OuterVolumeSpecName: "kube-api-access-96mht") pod "51d67e99-957e-4645-9a0b-243cdc7e8369" (UID: "51d67e99-957e-4645-9a0b-243cdc7e8369"). InnerVolumeSpecName "kube-api-access-96mht". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:41:48 crc kubenswrapper[4791]: I1208 21:41:48.094142 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-96mht\" (UniqueName: \"kubernetes.io/projected/51d67e99-957e-4645-9a0b-243cdc7e8369-kube-api-access-96mht\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:48 crc kubenswrapper[4791]: I1208 21:41:48.294251 4791 generic.go:334] "Generic (PLEG): container finished" podID="87530e07-a720-4b5f-bd6f-c3f8bb540453" containerID="91ad5287af76fcf551cf30d4f96a41d4fef558f7ab29815285c08d54b59a4e7c" exitCode=0 Dec 08 21:41:48 crc kubenswrapper[4791]: I1208 21:41:48.294314 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-2" event={"ID":"87530e07-a720-4b5f-bd6f-c3f8bb540453","Type":"ContainerDied","Data":"91ad5287af76fcf551cf30d4f96a41d4fef558f7ab29815285c08d54b59a4e7c"} Dec 08 21:41:48 crc kubenswrapper[4791]: I1208 21:41:48.295677 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-f129-account-create-update-6597r" event={"ID":"51d67e99-957e-4645-9a0b-243cdc7e8369","Type":"ContainerDied","Data":"e8ab2e98979b9131a3e6d62a00637db5ce00ef1c8d253a605bbb4a333650d45f"} Dec 08 21:41:48 crc kubenswrapper[4791]: I1208 21:41:48.295731 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e8ab2e98979b9131a3e6d62a00637db5ce00ef1c8d253a605bbb4a333650d45f" Dec 08 21:41:48 crc kubenswrapper[4791]: I1208 21:41:48.295703 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-f129-account-create-update-6597r" Dec 08 21:41:48 crc kubenswrapper[4791]: I1208 21:41:48.300371 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-2gfz5" event={"ID":"0e800e49-dab4-40d0-a626-0e6f7a62ed50","Type":"ContainerDied","Data":"7704602afc0414854f2555b9ca2625e3983d7dcbfebe37611a383e895ed93455"} Dec 08 21:41:48 crc kubenswrapper[4791]: I1208 21:41:48.300441 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7704602afc0414854f2555b9ca2625e3983d7dcbfebe37611a383e895ed93455" Dec 08 21:41:48 crc kubenswrapper[4791]: I1208 21:41:48.300526 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-2gfz5" Dec 08 21:41:48 crc kubenswrapper[4791]: I1208 21:41:48.618119 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-57d65f699f-q4qnx" Dec 08 21:41:48 crc kubenswrapper[4791]: I1208 21:41:48.824251 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-249fw" Dec 08 21:41:48 crc kubenswrapper[4791]: I1208 21:41:48.899968 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-b8fbc5445-7cwhm" Dec 08 21:41:48 crc kubenswrapper[4791]: I1208 21:41:48.914144 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6bg7m\" (UniqueName: \"kubernetes.io/projected/5a5de61a-f218-4e36-afaf-2cab04468093-kube-api-access-6bg7m\") pod \"5a5de61a-f218-4e36-afaf-2cab04468093\" (UID: \"5a5de61a-f218-4e36-afaf-2cab04468093\") " Dec 08 21:41:48 crc kubenswrapper[4791]: I1208 21:41:48.915205 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/5a5de61a-f218-4e36-afaf-2cab04468093-ring-data-devices\") pod \"5a5de61a-f218-4e36-afaf-2cab04468093\" (UID: \"5a5de61a-f218-4e36-afaf-2cab04468093\") " Dec 08 21:41:48 crc kubenswrapper[4791]: I1208 21:41:48.915368 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a5de61a-f218-4e36-afaf-2cab04468093-combined-ca-bundle\") pod \"5a5de61a-f218-4e36-afaf-2cab04468093\" (UID: \"5a5de61a-f218-4e36-afaf-2cab04468093\") " Dec 08 21:41:48 crc kubenswrapper[4791]: I1208 21:41:48.915470 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/5a5de61a-f218-4e36-afaf-2cab04468093-etc-swift\") pod \"5a5de61a-f218-4e36-afaf-2cab04468093\" (UID: \"5a5de61a-f218-4e36-afaf-2cab04468093\") " Dec 08 21:41:48 crc kubenswrapper[4791]: I1208 21:41:48.915508 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5a5de61a-f218-4e36-afaf-2cab04468093-scripts\") pod \"5a5de61a-f218-4e36-afaf-2cab04468093\" (UID: \"5a5de61a-f218-4e36-afaf-2cab04468093\") " Dec 08 21:41:48 crc kubenswrapper[4791]: I1208 21:41:48.915645 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/5a5de61a-f218-4e36-afaf-2cab04468093-swiftconf\") pod \"5a5de61a-f218-4e36-afaf-2cab04468093\" (UID: \"5a5de61a-f218-4e36-afaf-2cab04468093\") " Dec 08 21:41:48 crc kubenswrapper[4791]: I1208 21:41:48.915719 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/5a5de61a-f218-4e36-afaf-2cab04468093-dispersionconf\") pod \"5a5de61a-f218-4e36-afaf-2cab04468093\" (UID: \"5a5de61a-f218-4e36-afaf-2cab04468093\") " Dec 08 21:41:48 crc kubenswrapper[4791]: I1208 21:41:48.919520 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5a5de61a-f218-4e36-afaf-2cab04468093-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "5a5de61a-f218-4e36-afaf-2cab04468093" (UID: "5a5de61a-f218-4e36-afaf-2cab04468093"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:41:48 crc kubenswrapper[4791]: I1208 21:41:48.923206 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5a5de61a-f218-4e36-afaf-2cab04468093-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "5a5de61a-f218-4e36-afaf-2cab04468093" (UID: "5a5de61a-f218-4e36-afaf-2cab04468093"). InnerVolumeSpecName "ring-data-devices". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:41:48 crc kubenswrapper[4791]: I1208 21:41:48.933637 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a5de61a-f218-4e36-afaf-2cab04468093-kube-api-access-6bg7m" (OuterVolumeSpecName: "kube-api-access-6bg7m") pod "5a5de61a-f218-4e36-afaf-2cab04468093" (UID: "5a5de61a-f218-4e36-afaf-2cab04468093"). InnerVolumeSpecName "kube-api-access-6bg7m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:41:48 crc kubenswrapper[4791]: I1208 21:41:48.958099 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a5de61a-f218-4e36-afaf-2cab04468093-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "5a5de61a-f218-4e36-afaf-2cab04468093" (UID: "5a5de61a-f218-4e36-afaf-2cab04468093"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:41:48 crc kubenswrapper[4791]: I1208 21:41:48.985394 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5a5de61a-f218-4e36-afaf-2cab04468093-scripts" (OuterVolumeSpecName: "scripts") pod "5a5de61a-f218-4e36-afaf-2cab04468093" (UID: "5a5de61a-f218-4e36-afaf-2cab04468093"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:41:49 crc kubenswrapper[4791]: I1208 21:41:49.005731 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a5de61a-f218-4e36-afaf-2cab04468093-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5a5de61a-f218-4e36-afaf-2cab04468093" (UID: "5a5de61a-f218-4e36-afaf-2cab04468093"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:41:49 crc kubenswrapper[4791]: I1208 21:41:49.007252 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a5de61a-f218-4e36-afaf-2cab04468093-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "5a5de61a-f218-4e36-afaf-2cab04468093" (UID: "5a5de61a-f218-4e36-afaf-2cab04468093"). InnerVolumeSpecName "swiftconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:41:49 crc kubenswrapper[4791]: I1208 21:41:49.011347 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d65f699f-q4qnx"] Dec 08 21:41:49 crc kubenswrapper[4791]: I1208 21:41:49.020782 4791 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/5a5de61a-f218-4e36-afaf-2cab04468093-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:49 crc kubenswrapper[4791]: I1208 21:41:49.020811 4791 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a5de61a-f218-4e36-afaf-2cab04468093-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:49 crc kubenswrapper[4791]: I1208 21:41:49.020825 4791 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/5a5de61a-f218-4e36-afaf-2cab04468093-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:49 crc kubenswrapper[4791]: I1208 21:41:49.020838 4791 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5a5de61a-f218-4e36-afaf-2cab04468093-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:49 crc kubenswrapper[4791]: I1208 21:41:49.020850 4791 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/5a5de61a-f218-4e36-afaf-2cab04468093-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:49 crc kubenswrapper[4791]: I1208 21:41:49.020860 4791 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/5a5de61a-f218-4e36-afaf-2cab04468093-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:49 crc kubenswrapper[4791]: I1208 21:41:49.020872 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6bg7m\" (UniqueName: \"kubernetes.io/projected/5a5de61a-f218-4e36-afaf-2cab04468093-kube-api-access-6bg7m\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:49 crc kubenswrapper[4791]: I1208 21:41:49.038412 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-cd7e-account-create-update-92q4x" Dec 08 21:41:49 crc kubenswrapper[4791]: I1208 21:41:49.055275 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-x6ghn" Dec 08 21:41:49 crc kubenswrapper[4791]: I1208 21:41:49.123848 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tlmb8\" (UniqueName: \"kubernetes.io/projected/492c6838-316b-4f0a-8115-3ba1b4b05ce2-kube-api-access-tlmb8\") pod \"492c6838-316b-4f0a-8115-3ba1b4b05ce2\" (UID: \"492c6838-316b-4f0a-8115-3ba1b4b05ce2\") " Dec 08 21:41:49 crc kubenswrapper[4791]: I1208 21:41:49.124371 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/492c6838-316b-4f0a-8115-3ba1b4b05ce2-operator-scripts\") pod \"492c6838-316b-4f0a-8115-3ba1b4b05ce2\" (UID: \"492c6838-316b-4f0a-8115-3ba1b4b05ce2\") " Dec 08 21:41:49 crc kubenswrapper[4791]: I1208 21:41:49.124516 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/291278e1-f793-4fe7-bf0e-63279ac0ba7d-operator-scripts\") pod \"291278e1-f793-4fe7-bf0e-63279ac0ba7d\" (UID: \"291278e1-f793-4fe7-bf0e-63279ac0ba7d\") " Dec 08 21:41:49 crc kubenswrapper[4791]: I1208 21:41:49.124687 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b6c6t\" (UniqueName: \"kubernetes.io/projected/291278e1-f793-4fe7-bf0e-63279ac0ba7d-kube-api-access-b6c6t\") pod \"291278e1-f793-4fe7-bf0e-63279ac0ba7d\" (UID: \"291278e1-f793-4fe7-bf0e-63279ac0ba7d\") " Dec 08 21:41:49 crc kubenswrapper[4791]: I1208 21:41:49.126388 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/291278e1-f793-4fe7-bf0e-63279ac0ba7d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "291278e1-f793-4fe7-bf0e-63279ac0ba7d" (UID: "291278e1-f793-4fe7-bf0e-63279ac0ba7d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:41:49 crc kubenswrapper[4791]: I1208 21:41:49.126731 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/492c6838-316b-4f0a-8115-3ba1b4b05ce2-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "492c6838-316b-4f0a-8115-3ba1b4b05ce2" (UID: "492c6838-316b-4f0a-8115-3ba1b4b05ce2"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:41:49 crc kubenswrapper[4791]: I1208 21:41:49.130506 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/492c6838-316b-4f0a-8115-3ba1b4b05ce2-kube-api-access-tlmb8" (OuterVolumeSpecName: "kube-api-access-tlmb8") pod "492c6838-316b-4f0a-8115-3ba1b4b05ce2" (UID: "492c6838-316b-4f0a-8115-3ba1b4b05ce2"). InnerVolumeSpecName "kube-api-access-tlmb8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:41:49 crc kubenswrapper[4791]: I1208 21:41:49.131041 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/291278e1-f793-4fe7-bf0e-63279ac0ba7d-kube-api-access-b6c6t" (OuterVolumeSpecName: "kube-api-access-b6c6t") pod "291278e1-f793-4fe7-bf0e-63279ac0ba7d" (UID: "291278e1-f793-4fe7-bf0e-63279ac0ba7d"). InnerVolumeSpecName "kube-api-access-b6c6t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:41:49 crc kubenswrapper[4791]: I1208 21:41:49.227605 4791 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/492c6838-316b-4f0a-8115-3ba1b4b05ce2-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:49 crc kubenswrapper[4791]: I1208 21:41:49.227646 4791 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/291278e1-f793-4fe7-bf0e-63279ac0ba7d-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:49 crc kubenswrapper[4791]: I1208 21:41:49.227657 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b6c6t\" (UniqueName: \"kubernetes.io/projected/291278e1-f793-4fe7-bf0e-63279ac0ba7d-kube-api-access-b6c6t\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:49 crc kubenswrapper[4791]: I1208 21:41:49.227668 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tlmb8\" (UniqueName: \"kubernetes.io/projected/492c6838-316b-4f0a-8115-3ba1b4b05ce2-kube-api-access-tlmb8\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:49 crc kubenswrapper[4791]: I1208 21:41:49.323382 4791 generic.go:334] "Generic (PLEG): container finished" podID="d9cd6ba2-6502-43cf-8e48-36570ea8e831" containerID="6a0637a0f4a66c462da7dc957cd4c2ba75e3dc02cef51ffe6840ae5171578b30" exitCode=0 Dec 08 21:41:49 crc kubenswrapper[4791]: I1208 21:41:49.323597 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"d9cd6ba2-6502-43cf-8e48-36570ea8e831","Type":"ContainerDied","Data":"6a0637a0f4a66c462da7dc957cd4c2ba75e3dc02cef51ffe6840ae5171578b30"} Dec 08 21:41:49 crc kubenswrapper[4791]: I1208 21:41:49.326734 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-249fw" Dec 08 21:41:49 crc kubenswrapper[4791]: I1208 21:41:49.327759 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-249fw" event={"ID":"5a5de61a-f218-4e36-afaf-2cab04468093","Type":"ContainerDied","Data":"5271e2c781f7894fa596ccfdc6daf80e1eeabcd29ab54582cbd21032795fc9fb"} Dec 08 21:41:49 crc kubenswrapper[4791]: I1208 21:41:49.327789 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5271e2c781f7894fa596ccfdc6daf80e1eeabcd29ab54582cbd21032795fc9fb" Dec 08 21:41:49 crc kubenswrapper[4791]: I1208 21:41:49.332555 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-x6ghn" Dec 08 21:41:49 crc kubenswrapper[4791]: I1208 21:41:49.336409 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-x6ghn" event={"ID":"291278e1-f793-4fe7-bf0e-63279ac0ba7d","Type":"ContainerDied","Data":"0e4f6a66b14914db48e69f4eeae4bdbee246bbcb76ae9582613810945fb98a58"} Dec 08 21:41:49 crc kubenswrapper[4791]: I1208 21:41:49.336450 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0e4f6a66b14914db48e69f4eeae4bdbee246bbcb76ae9582613810945fb98a58" Dec 08 21:41:49 crc kubenswrapper[4791]: I1208 21:41:49.339052 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-2" event={"ID":"87530e07-a720-4b5f-bd6f-c3f8bb540453","Type":"ContainerStarted","Data":"740ee5dba066600ffcc26397a0ac31f72b701616a5788f6ec8ab328f5d57b6b3"} Dec 08 21:41:49 crc kubenswrapper[4791]: I1208 21:41:49.340083 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-2" Dec 08 21:41:49 crc kubenswrapper[4791]: I1208 21:41:49.349119 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57d65f699f-q4qnx" podUID="523f1b51-d3bc-419c-ac74-63559640838c" containerName="dnsmasq-dns" containerID="cri-o://471980b8dc123b89b3c02acd73c1255390576c424b106f8070f8566bbc4f5924" gracePeriod=10 Dec 08 21:41:49 crc kubenswrapper[4791]: I1208 21:41:49.351423 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-cd7e-account-create-update-92q4x" event={"ID":"492c6838-316b-4f0a-8115-3ba1b4b05ce2","Type":"ContainerDied","Data":"6f04ecca01f1762c6bb7eeadd4cf1d1b09330c0c9299671468c79ed7ebd73a2d"} Dec 08 21:41:49 crc kubenswrapper[4791]: I1208 21:41:49.351473 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6f04ecca01f1762c6bb7eeadd4cf1d1b09330c0c9299671468c79ed7ebd73a2d" Dec 08 21:41:49 crc kubenswrapper[4791]: I1208 21:41:49.351571 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-cd7e-account-create-update-92q4x" Dec 08 21:41:49 crc kubenswrapper[4791]: I1208 21:41:49.399532 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-2" podStartSLOduration=38.601270975 podStartE2EDuration="1m10.399513174s" podCreationTimestamp="2025-12-08 21:40:39 +0000 UTC" firstStartedPulling="2025-12-08 21:40:42.383785776 +0000 UTC m=+1319.082544121" lastFinishedPulling="2025-12-08 21:41:14.182027975 +0000 UTC m=+1350.880786320" observedRunningTime="2025-12-08 21:41:49.383737726 +0000 UTC m=+1386.082496071" watchObservedRunningTime="2025-12-08 21:41:49.399513174 +0000 UTC m=+1386.098271519" Dec 08 21:41:49 crc kubenswrapper[4791]: I1208 21:41:49.941125 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57d65f699f-q4qnx" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.046696 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xhgz7\" (UniqueName: \"kubernetes.io/projected/523f1b51-d3bc-419c-ac74-63559640838c-kube-api-access-xhgz7\") pod \"523f1b51-d3bc-419c-ac74-63559640838c\" (UID: \"523f1b51-d3bc-419c-ac74-63559640838c\") " Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.046807 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/523f1b51-d3bc-419c-ac74-63559640838c-config\") pod \"523f1b51-d3bc-419c-ac74-63559640838c\" (UID: \"523f1b51-d3bc-419c-ac74-63559640838c\") " Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.046918 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/523f1b51-d3bc-419c-ac74-63559640838c-ovsdbserver-nb\") pod \"523f1b51-d3bc-419c-ac74-63559640838c\" (UID: \"523f1b51-d3bc-419c-ac74-63559640838c\") " Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.047133 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/523f1b51-d3bc-419c-ac74-63559640838c-dns-svc\") pod \"523f1b51-d3bc-419c-ac74-63559640838c\" (UID: \"523f1b51-d3bc-419c-ac74-63559640838c\") " Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.056066 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/523f1b51-d3bc-419c-ac74-63559640838c-kube-api-access-xhgz7" (OuterVolumeSpecName: "kube-api-access-xhgz7") pod "523f1b51-d3bc-419c-ac74-63559640838c" (UID: "523f1b51-d3bc-419c-ac74-63559640838c"). InnerVolumeSpecName "kube-api-access-xhgz7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.101483 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/523f1b51-d3bc-419c-ac74-63559640838c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "523f1b51-d3bc-419c-ac74-63559640838c" (UID: "523f1b51-d3bc-419c-ac74-63559640838c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.107688 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/523f1b51-d3bc-419c-ac74-63559640838c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "523f1b51-d3bc-419c-ac74-63559640838c" (UID: "523f1b51-d3bc-419c-ac74-63559640838c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.130802 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/523f1b51-d3bc-419c-ac74-63559640838c-config" (OuterVolumeSpecName: "config") pod "523f1b51-d3bc-419c-ac74-63559640838c" (UID: "523f1b51-d3bc-419c-ac74-63559640838c"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.149196 4791 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/523f1b51-d3bc-419c-ac74-63559640838c-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.149232 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xhgz7\" (UniqueName: \"kubernetes.io/projected/523f1b51-d3bc-419c-ac74-63559640838c-kube-api-access-xhgz7\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.149242 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/523f1b51-d3bc-419c-ac74-63559640838c-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.149253 4791 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/523f1b51-d3bc-419c-ac74-63559640838c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.359548 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"d9cd6ba2-6502-43cf-8e48-36570ea8e831","Type":"ContainerStarted","Data":"921fe9ae616cfe775c1a5f09c9c75d88454fe3dff93c40fd314f62defa124350"} Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.362199 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.371798 4791 generic.go:334] "Generic (PLEG): container finished" podID="523f1b51-d3bc-419c-ac74-63559640838c" containerID="471980b8dc123b89b3c02acd73c1255390576c424b106f8070f8566bbc4f5924" exitCode=0 Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.373234 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57d65f699f-q4qnx" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.375923 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d65f699f-q4qnx" event={"ID":"523f1b51-d3bc-419c-ac74-63559640838c","Type":"ContainerDied","Data":"471980b8dc123b89b3c02acd73c1255390576c424b106f8070f8566bbc4f5924"} Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.376004 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d65f699f-q4qnx" event={"ID":"523f1b51-d3bc-419c-ac74-63559640838c","Type":"ContainerDied","Data":"1550fb1e8833664f58980968f42b7ca46d38cc617bfa04b476535e30334b675c"} Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.376037 4791 scope.go:117] "RemoveContainer" containerID="471980b8dc123b89b3c02acd73c1255390576c424b106f8070f8566bbc4f5924" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.407778 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=-9223371966.447021 podStartE2EDuration="1m10.407753852s" podCreationTimestamp="2025-12-08 21:40:40 +0000 UTC" firstStartedPulling="2025-12-08 21:40:42.511847391 +0000 UTC m=+1319.210605736" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:41:50.394872825 +0000 UTC m=+1387.093631190" watchObservedRunningTime="2025-12-08 21:41:50.407753852 +0000 UTC m=+1387.106512197" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.408543 4791 scope.go:117] "RemoveContainer" containerID="ea2cf20a61f76e3623cc014bc14f61623743171a132910a7f4f89075564a0ced" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.433812 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d65f699f-q4qnx"] Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.448226 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d65f699f-q4qnx"] Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.452008 4791 scope.go:117] "RemoveContainer" containerID="471980b8dc123b89b3c02acd73c1255390576c424b106f8070f8566bbc4f5924" Dec 08 21:41:50 crc kubenswrapper[4791]: E1208 21:41:50.455499 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"471980b8dc123b89b3c02acd73c1255390576c424b106f8070f8566bbc4f5924\": container with ID starting with 471980b8dc123b89b3c02acd73c1255390576c424b106f8070f8566bbc4f5924 not found: ID does not exist" containerID="471980b8dc123b89b3c02acd73c1255390576c424b106f8070f8566bbc4f5924" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.455562 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"471980b8dc123b89b3c02acd73c1255390576c424b106f8070f8566bbc4f5924"} err="failed to get container status \"471980b8dc123b89b3c02acd73c1255390576c424b106f8070f8566bbc4f5924\": rpc error: code = NotFound desc = could not find container \"471980b8dc123b89b3c02acd73c1255390576c424b106f8070f8566bbc4f5924\": container with ID starting with 471980b8dc123b89b3c02acd73c1255390576c424b106f8070f8566bbc4f5924 not found: ID does not exist" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.455596 4791 scope.go:117] "RemoveContainer" containerID="ea2cf20a61f76e3623cc014bc14f61623743171a132910a7f4f89075564a0ced" Dec 08 21:41:50 crc kubenswrapper[4791]: E1208 21:41:50.456852 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = 
NotFound desc = could not find container \"ea2cf20a61f76e3623cc014bc14f61623743171a132910a7f4f89075564a0ced\": container with ID starting with ea2cf20a61f76e3623cc014bc14f61623743171a132910a7f4f89075564a0ced not found: ID does not exist" containerID="ea2cf20a61f76e3623cc014bc14f61623743171a132910a7f4f89075564a0ced" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.456903 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea2cf20a61f76e3623cc014bc14f61623743171a132910a7f4f89075564a0ced"} err="failed to get container status \"ea2cf20a61f76e3623cc014bc14f61623743171a132910a7f4f89075564a0ced\": rpc error: code = NotFound desc = could not find container \"ea2cf20a61f76e3623cc014bc14f61623743171a132910a7f4f89075564a0ced\": container with ID starting with ea2cf20a61f76e3623cc014bc14f61623743171a132910a7f4f89075564a0ced not found: ID does not exist" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.484650 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-rrtnb"] Dec 08 21:41:50 crc kubenswrapper[4791]: E1208 21:41:50.485142 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="523f1b51-d3bc-419c-ac74-63559640838c" containerName="init" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.485171 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="523f1b51-d3bc-419c-ac74-63559640838c" containerName="init" Dec 08 21:41:50 crc kubenswrapper[4791]: E1208 21:41:50.485186 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="492c6838-316b-4f0a-8115-3ba1b4b05ce2" containerName="mariadb-account-create-update" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.485193 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="492c6838-316b-4f0a-8115-3ba1b4b05ce2" containerName="mariadb-account-create-update" Dec 08 21:41:50 crc kubenswrapper[4791]: E1208 21:41:50.485208 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51d67e99-957e-4645-9a0b-243cdc7e8369" containerName="mariadb-account-create-update" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.485215 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="51d67e99-957e-4645-9a0b-243cdc7e8369" containerName="mariadb-account-create-update" Dec 08 21:41:50 crc kubenswrapper[4791]: E1208 21:41:50.485230 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="523f1b51-d3bc-419c-ac74-63559640838c" containerName="dnsmasq-dns" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.485236 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="523f1b51-d3bc-419c-ac74-63559640838c" containerName="dnsmasq-dns" Dec 08 21:41:50 crc kubenswrapper[4791]: E1208 21:41:50.485256 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a5de61a-f218-4e36-afaf-2cab04468093" containerName="swift-ring-rebalance" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.485262 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a5de61a-f218-4e36-afaf-2cab04468093" containerName="swift-ring-rebalance" Dec 08 21:41:50 crc kubenswrapper[4791]: E1208 21:41:50.485282 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="291278e1-f793-4fe7-bf0e-63279ac0ba7d" containerName="mariadb-database-create" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.485289 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="291278e1-f793-4fe7-bf0e-63279ac0ba7d" containerName="mariadb-database-create" Dec 08 21:41:50 crc kubenswrapper[4791]: E1208 
21:41:50.485305 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e800e49-dab4-40d0-a626-0e6f7a62ed50" containerName="mariadb-database-create" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.485311 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e800e49-dab4-40d0-a626-0e6f7a62ed50" containerName="mariadb-database-create" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.485495 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="492c6838-316b-4f0a-8115-3ba1b4b05ce2" containerName="mariadb-account-create-update" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.485506 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="291278e1-f793-4fe7-bf0e-63279ac0ba7d" containerName="mariadb-database-create" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.485520 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e800e49-dab4-40d0-a626-0e6f7a62ed50" containerName="mariadb-database-create" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.485532 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a5de61a-f218-4e36-afaf-2cab04468093" containerName="swift-ring-rebalance" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.485548 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="51d67e99-957e-4645-9a0b-243cdc7e8369" containerName="mariadb-account-create-update" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.485558 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="523f1b51-d3bc-419c-ac74-63559640838c" containerName="dnsmasq-dns" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.486360 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-rrtnb" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.489606 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-mzhj6" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.489750 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.499281 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-rrtnb"] Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.557683 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e74124c9-f94a-4168-a9b1-dafbcb9e0f70-config-data\") pod \"glance-db-sync-rrtnb\" (UID: \"e74124c9-f94a-4168-a9b1-dafbcb9e0f70\") " pod="openstack/glance-db-sync-rrtnb" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.557826 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e74124c9-f94a-4168-a9b1-dafbcb9e0f70-db-sync-config-data\") pod \"glance-db-sync-rrtnb\" (UID: \"e74124c9-f94a-4168-a9b1-dafbcb9e0f70\") " pod="openstack/glance-db-sync-rrtnb" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.557957 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zdvfx\" (UniqueName: \"kubernetes.io/projected/e74124c9-f94a-4168-a9b1-dafbcb9e0f70-kube-api-access-zdvfx\") pod \"glance-db-sync-rrtnb\" (UID: \"e74124c9-f94a-4168-a9b1-dafbcb9e0f70\") " pod="openstack/glance-db-sync-rrtnb" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.558018 4791 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e74124c9-f94a-4168-a9b1-dafbcb9e0f70-combined-ca-bundle\") pod \"glance-db-sync-rrtnb\" (UID: \"e74124c9-f94a-4168-a9b1-dafbcb9e0f70\") " pod="openstack/glance-db-sync-rrtnb" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.659908 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e74124c9-f94a-4168-a9b1-dafbcb9e0f70-db-sync-config-data\") pod \"glance-db-sync-rrtnb\" (UID: \"e74124c9-f94a-4168-a9b1-dafbcb9e0f70\") " pod="openstack/glance-db-sync-rrtnb" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.660116 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zdvfx\" (UniqueName: \"kubernetes.io/projected/e74124c9-f94a-4168-a9b1-dafbcb9e0f70-kube-api-access-zdvfx\") pod \"glance-db-sync-rrtnb\" (UID: \"e74124c9-f94a-4168-a9b1-dafbcb9e0f70\") " pod="openstack/glance-db-sync-rrtnb" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.660199 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e74124c9-f94a-4168-a9b1-dafbcb9e0f70-combined-ca-bundle\") pod \"glance-db-sync-rrtnb\" (UID: \"e74124c9-f94a-4168-a9b1-dafbcb9e0f70\") " pod="openstack/glance-db-sync-rrtnb" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.660294 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e74124c9-f94a-4168-a9b1-dafbcb9e0f70-config-data\") pod \"glance-db-sync-rrtnb\" (UID: \"e74124c9-f94a-4168-a9b1-dafbcb9e0f70\") " pod="openstack/glance-db-sync-rrtnb" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.663956 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e74124c9-f94a-4168-a9b1-dafbcb9e0f70-db-sync-config-data\") pod \"glance-db-sync-rrtnb\" (UID: \"e74124c9-f94a-4168-a9b1-dafbcb9e0f70\") " pod="openstack/glance-db-sync-rrtnb" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.664152 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e74124c9-f94a-4168-a9b1-dafbcb9e0f70-combined-ca-bundle\") pod \"glance-db-sync-rrtnb\" (UID: \"e74124c9-f94a-4168-a9b1-dafbcb9e0f70\") " pod="openstack/glance-db-sync-rrtnb" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.665179 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e74124c9-f94a-4168-a9b1-dafbcb9e0f70-config-data\") pod \"glance-db-sync-rrtnb\" (UID: \"e74124c9-f94a-4168-a9b1-dafbcb9e0f70\") " pod="openstack/glance-db-sync-rrtnb" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.679267 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zdvfx\" (UniqueName: \"kubernetes.io/projected/e74124c9-f94a-4168-a9b1-dafbcb9e0f70-kube-api-access-zdvfx\") pod \"glance-db-sync-rrtnb\" (UID: \"e74124c9-f94a-4168-a9b1-dafbcb9e0f70\") " pod="openstack/glance-db-sync-rrtnb" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.809496 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-rrtnb" Dec 08 21:41:50 crc kubenswrapper[4791]: I1208 21:41:50.974076 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-trs25" Dec 08 21:41:51 crc kubenswrapper[4791]: I1208 21:41:51.038222 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-trs25" Dec 08 21:41:51 crc kubenswrapper[4791]: I1208 21:41:51.239070 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-trs25"] Dec 08 21:41:51 crc kubenswrapper[4791]: I1208 21:41:51.368247 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-rrtnb"] Dec 08 21:41:51 crc kubenswrapper[4791]: I1208 21:41:51.613143 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="523f1b51-d3bc-419c-ac74-63559640838c" path="/var/lib/kubelet/pods/523f1b51-d3bc-419c-ac74-63559640838c/volumes" Dec 08 21:41:52 crc kubenswrapper[4791]: I1208 21:41:52.400967 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-rrtnb" event={"ID":"e74124c9-f94a-4168-a9b1-dafbcb9e0f70","Type":"ContainerStarted","Data":"79afbc79ac0b0ca148d93cb10ac7591fc528c93a4a6822e3273e17fc48ec3ce9"} Dec 08 21:41:52 crc kubenswrapper[4791]: I1208 21:41:52.401018 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-trs25" podUID="95730417-3c85-44a0-9a95-8fefeb495a03" containerName="registry-server" containerID="cri-o://e2b0a5b74d1d79c81f058ce34ad85e975ef4627c890e756ef41d48baaf6a1b30" gracePeriod=2 Dec 08 21:41:53 crc kubenswrapper[4791]: I1208 21:41:53.412161 4791 generic.go:334] "Generic (PLEG): container finished" podID="6101a045-4b01-484e-a65b-4c406e458ea1" containerID="fa76d2f6266b5898e49b028af878b3eaa819a1d539b7e2d1e7c5332a2ee431f0" exitCode=0 Dec 08 21:41:53 crc kubenswrapper[4791]: I1208 21:41:53.412347 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-1" event={"ID":"6101a045-4b01-484e-a65b-4c406e458ea1","Type":"ContainerDied","Data":"fa76d2f6266b5898e49b028af878b3eaa819a1d539b7e2d1e7c5332a2ee431f0"} Dec 08 21:41:53 crc kubenswrapper[4791]: I1208 21:41:53.421683 4791 generic.go:334] "Generic (PLEG): container finished" podID="95730417-3c85-44a0-9a95-8fefeb495a03" containerID="e2b0a5b74d1d79c81f058ce34ad85e975ef4627c890e756ef41d48baaf6a1b30" exitCode=0 Dec 08 21:41:53 crc kubenswrapper[4791]: I1208 21:41:53.421736 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-trs25" event={"ID":"95730417-3c85-44a0-9a95-8fefeb495a03","Type":"ContainerDied","Data":"e2b0a5b74d1d79c81f058ce34ad85e975ef4627c890e756ef41d48baaf6a1b30"} Dec 08 21:41:53 crc kubenswrapper[4791]: I1208 21:41:53.703291 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-trs25" Dec 08 21:41:53 crc kubenswrapper[4791]: I1208 21:41:53.831459 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/95730417-3c85-44a0-9a95-8fefeb495a03-utilities\") pod \"95730417-3c85-44a0-9a95-8fefeb495a03\" (UID: \"95730417-3c85-44a0-9a95-8fefeb495a03\") " Dec 08 21:41:53 crc kubenswrapper[4791]: I1208 21:41:53.831625 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4hx2n\" (UniqueName: \"kubernetes.io/projected/95730417-3c85-44a0-9a95-8fefeb495a03-kube-api-access-4hx2n\") pod \"95730417-3c85-44a0-9a95-8fefeb495a03\" (UID: \"95730417-3c85-44a0-9a95-8fefeb495a03\") " Dec 08 21:41:53 crc kubenswrapper[4791]: I1208 21:41:53.831688 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/95730417-3c85-44a0-9a95-8fefeb495a03-catalog-content\") pod \"95730417-3c85-44a0-9a95-8fefeb495a03\" (UID: \"95730417-3c85-44a0-9a95-8fefeb495a03\") " Dec 08 21:41:53 crc kubenswrapper[4791]: I1208 21:41:53.832297 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/95730417-3c85-44a0-9a95-8fefeb495a03-utilities" (OuterVolumeSpecName: "utilities") pod "95730417-3c85-44a0-9a95-8fefeb495a03" (UID: "95730417-3c85-44a0-9a95-8fefeb495a03"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:41:53 crc kubenswrapper[4791]: I1208 21:41:53.848948 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/95730417-3c85-44a0-9a95-8fefeb495a03-kube-api-access-4hx2n" (OuterVolumeSpecName: "kube-api-access-4hx2n") pod "95730417-3c85-44a0-9a95-8fefeb495a03" (UID: "95730417-3c85-44a0-9a95-8fefeb495a03"). InnerVolumeSpecName "kube-api-access-4hx2n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:41:53 crc kubenswrapper[4791]: I1208 21:41:53.862461 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Dec 08 21:41:53 crc kubenswrapper[4791]: I1208 21:41:53.933832 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/95730417-3c85-44a0-9a95-8fefeb495a03-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:53 crc kubenswrapper[4791]: I1208 21:41:53.933866 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4hx2n\" (UniqueName: \"kubernetes.io/projected/95730417-3c85-44a0-9a95-8fefeb495a03-kube-api-access-4hx2n\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:53 crc kubenswrapper[4791]: I1208 21:41:53.961058 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/95730417-3c85-44a0-9a95-8fefeb495a03-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "95730417-3c85-44a0-9a95-8fefeb495a03" (UID: "95730417-3c85-44a0-9a95-8fefeb495a03"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:41:54 crc kubenswrapper[4791]: I1208 21:41:54.036159 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/95730417-3c85-44a0-9a95-8fefeb495a03-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:54 crc kubenswrapper[4791]: I1208 21:41:54.436093 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-trs25" event={"ID":"95730417-3c85-44a0-9a95-8fefeb495a03","Type":"ContainerDied","Data":"4aedcc7888b61bc586442f0555b29dd0bdaedd14c49cf4c8fe64784f386468b0"} Dec 08 21:41:54 crc kubenswrapper[4791]: I1208 21:41:54.436147 4791 scope.go:117] "RemoveContainer" containerID="e2b0a5b74d1d79c81f058ce34ad85e975ef4627c890e756ef41d48baaf6a1b30" Dec 08 21:41:54 crc kubenswrapper[4791]: I1208 21:41:54.436286 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-trs25" Dec 08 21:41:54 crc kubenswrapper[4791]: I1208 21:41:54.444324 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-1" event={"ID":"6101a045-4b01-484e-a65b-4c406e458ea1","Type":"ContainerStarted","Data":"003db2f2efcdd293b4cf42c41d9f5ad6d781ddd5c4688467f48d101c1013f33e"} Dec 08 21:41:54 crc kubenswrapper[4791]: I1208 21:41:54.445540 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-1" Dec 08 21:41:54 crc kubenswrapper[4791]: I1208 21:41:54.448108 4791 generic.go:334] "Generic (PLEG): container finished" podID="45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9" containerID="98c8e4efd467bfd4b9f8c3e964d4dac9a711ce08771957c180d136b9be560757" exitCode=0 Dec 08 21:41:54 crc kubenswrapper[4791]: I1208 21:41:54.448166 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9","Type":"ContainerDied","Data":"98c8e4efd467bfd4b9f8c3e964d4dac9a711ce08771957c180d136b9be560757"} Dec 08 21:41:54 crc kubenswrapper[4791]: I1208 21:41:54.469172 4791 scope.go:117] "RemoveContainer" containerID="2844490bb124b2b0cb9120a218e1c1c2be5253a09ceec9a1503462767511baaf" Dec 08 21:41:54 crc kubenswrapper[4791]: I1208 21:41:54.483417 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-1" podStartSLOduration=-9223371961.371378 podStartE2EDuration="1m15.483398542s" podCreationTimestamp="2025-12-08 21:40:39 +0000 UTC" firstStartedPulling="2025-12-08 21:40:42.04515699 +0000 UTC m=+1318.743915325" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:41:54.474640787 +0000 UTC m=+1391.173399132" watchObservedRunningTime="2025-12-08 21:41:54.483398542 +0000 UTC m=+1391.182156887" Dec 08 21:41:54 crc kubenswrapper[4791]: I1208 21:41:54.543187 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-trs25"] Dec 08 21:41:54 crc kubenswrapper[4791]: I1208 21:41:54.557245 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-trs25"] Dec 08 21:41:54 crc kubenswrapper[4791]: I1208 21:41:54.682338 4791 scope.go:117] "RemoveContainer" containerID="2725de65e588a0e7fa93dc2e03cb488d8b5669484fd6cbb801990b7b02fef5a6" Dec 08 21:41:54 crc kubenswrapper[4791]: I1208 21:41:54.881504 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-g2zr4" podUID="0ab866e7-9292-4d1c-b55e-6d29c9d23b05" 
containerName="ovn-controller" probeResult="failure" output=< Dec 08 21:41:54 crc kubenswrapper[4791]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 08 21:41:54 crc kubenswrapper[4791]: > Dec 08 21:41:54 crc kubenswrapper[4791]: I1208 21:41:54.907385 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-9b4hh" Dec 08 21:41:54 crc kubenswrapper[4791]: I1208 21:41:54.908287 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-9b4hh" Dec 08 21:41:55 crc kubenswrapper[4791]: I1208 21:41:55.173508 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-g2zr4-config-cntwx"] Dec 08 21:41:55 crc kubenswrapper[4791]: E1208 21:41:55.174081 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95730417-3c85-44a0-9a95-8fefeb495a03" containerName="extract-content" Dec 08 21:41:55 crc kubenswrapper[4791]: I1208 21:41:55.174105 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="95730417-3c85-44a0-9a95-8fefeb495a03" containerName="extract-content" Dec 08 21:41:55 crc kubenswrapper[4791]: E1208 21:41:55.174150 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95730417-3c85-44a0-9a95-8fefeb495a03" containerName="registry-server" Dec 08 21:41:55 crc kubenswrapper[4791]: I1208 21:41:55.174163 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="95730417-3c85-44a0-9a95-8fefeb495a03" containerName="registry-server" Dec 08 21:41:55 crc kubenswrapper[4791]: E1208 21:41:55.174185 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95730417-3c85-44a0-9a95-8fefeb495a03" containerName="extract-utilities" Dec 08 21:41:55 crc kubenswrapper[4791]: I1208 21:41:55.174199 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="95730417-3c85-44a0-9a95-8fefeb495a03" containerName="extract-utilities" Dec 08 21:41:55 crc kubenswrapper[4791]: I1208 21:41:55.174418 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="95730417-3c85-44a0-9a95-8fefeb495a03" containerName="registry-server" Dec 08 21:41:55 crc kubenswrapper[4791]: I1208 21:41:55.175179 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-g2zr4-config-cntwx" Dec 08 21:41:55 crc kubenswrapper[4791]: I1208 21:41:55.178017 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 08 21:41:55 crc kubenswrapper[4791]: I1208 21:41:55.211314 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-g2zr4-config-cntwx"] Dec 08 21:41:55 crc kubenswrapper[4791]: I1208 21:41:55.263140 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/46c514cc-472d-4668-8ba5-d7c31377af02-scripts\") pod \"ovn-controller-g2zr4-config-cntwx\" (UID: \"46c514cc-472d-4668-8ba5-d7c31377af02\") " pod="openstack/ovn-controller-g2zr4-config-cntwx" Dec 08 21:41:55 crc kubenswrapper[4791]: I1208 21:41:55.263231 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/46c514cc-472d-4668-8ba5-d7c31377af02-var-log-ovn\") pod \"ovn-controller-g2zr4-config-cntwx\" (UID: \"46c514cc-472d-4668-8ba5-d7c31377af02\") " pod="openstack/ovn-controller-g2zr4-config-cntwx" Dec 08 21:41:55 crc kubenswrapper[4791]: I1208 21:41:55.263356 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/46c514cc-472d-4668-8ba5-d7c31377af02-var-run\") pod \"ovn-controller-g2zr4-config-cntwx\" (UID: \"46c514cc-472d-4668-8ba5-d7c31377af02\") " pod="openstack/ovn-controller-g2zr4-config-cntwx" Dec 08 21:41:55 crc kubenswrapper[4791]: I1208 21:41:55.263430 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fcw9n\" (UniqueName: \"kubernetes.io/projected/46c514cc-472d-4668-8ba5-d7c31377af02-kube-api-access-fcw9n\") pod \"ovn-controller-g2zr4-config-cntwx\" (UID: \"46c514cc-472d-4668-8ba5-d7c31377af02\") " pod="openstack/ovn-controller-g2zr4-config-cntwx" Dec 08 21:41:55 crc kubenswrapper[4791]: I1208 21:41:55.263602 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/46c514cc-472d-4668-8ba5-d7c31377af02-additional-scripts\") pod \"ovn-controller-g2zr4-config-cntwx\" (UID: \"46c514cc-472d-4668-8ba5-d7c31377af02\") " pod="openstack/ovn-controller-g2zr4-config-cntwx" Dec 08 21:41:55 crc kubenswrapper[4791]: I1208 21:41:55.263829 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/46c514cc-472d-4668-8ba5-d7c31377af02-var-run-ovn\") pod \"ovn-controller-g2zr4-config-cntwx\" (UID: \"46c514cc-472d-4668-8ba5-d7c31377af02\") " pod="openstack/ovn-controller-g2zr4-config-cntwx" Dec 08 21:41:55 crc kubenswrapper[4791]: I1208 21:41:55.366517 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/46c514cc-472d-4668-8ba5-d7c31377af02-scripts\") pod \"ovn-controller-g2zr4-config-cntwx\" (UID: \"46c514cc-472d-4668-8ba5-d7c31377af02\") " pod="openstack/ovn-controller-g2zr4-config-cntwx" Dec 08 21:41:55 crc kubenswrapper[4791]: I1208 21:41:55.366963 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/46c514cc-472d-4668-8ba5-d7c31377af02-var-log-ovn\") pod 
\"ovn-controller-g2zr4-config-cntwx\" (UID: \"46c514cc-472d-4668-8ba5-d7c31377af02\") " pod="openstack/ovn-controller-g2zr4-config-cntwx" Dec 08 21:41:55 crc kubenswrapper[4791]: I1208 21:41:55.367041 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/46c514cc-472d-4668-8ba5-d7c31377af02-var-run\") pod \"ovn-controller-g2zr4-config-cntwx\" (UID: \"46c514cc-472d-4668-8ba5-d7c31377af02\") " pod="openstack/ovn-controller-g2zr4-config-cntwx" Dec 08 21:41:55 crc kubenswrapper[4791]: I1208 21:41:55.367143 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fcw9n\" (UniqueName: \"kubernetes.io/projected/46c514cc-472d-4668-8ba5-d7c31377af02-kube-api-access-fcw9n\") pod \"ovn-controller-g2zr4-config-cntwx\" (UID: \"46c514cc-472d-4668-8ba5-d7c31377af02\") " pod="openstack/ovn-controller-g2zr4-config-cntwx" Dec 08 21:41:55 crc kubenswrapper[4791]: I1208 21:41:55.367222 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/46c514cc-472d-4668-8ba5-d7c31377af02-additional-scripts\") pod \"ovn-controller-g2zr4-config-cntwx\" (UID: \"46c514cc-472d-4668-8ba5-d7c31377af02\") " pod="openstack/ovn-controller-g2zr4-config-cntwx" Dec 08 21:41:55 crc kubenswrapper[4791]: I1208 21:41:55.367325 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/46c514cc-472d-4668-8ba5-d7c31377af02-var-log-ovn\") pod \"ovn-controller-g2zr4-config-cntwx\" (UID: \"46c514cc-472d-4668-8ba5-d7c31377af02\") " pod="openstack/ovn-controller-g2zr4-config-cntwx" Dec 08 21:41:55 crc kubenswrapper[4791]: I1208 21:41:55.367378 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/46c514cc-472d-4668-8ba5-d7c31377af02-var-run\") pod \"ovn-controller-g2zr4-config-cntwx\" (UID: \"46c514cc-472d-4668-8ba5-d7c31377af02\") " pod="openstack/ovn-controller-g2zr4-config-cntwx" Dec 08 21:41:55 crc kubenswrapper[4791]: I1208 21:41:55.367395 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/46c514cc-472d-4668-8ba5-d7c31377af02-var-run-ovn\") pod \"ovn-controller-g2zr4-config-cntwx\" (UID: \"46c514cc-472d-4668-8ba5-d7c31377af02\") " pod="openstack/ovn-controller-g2zr4-config-cntwx" Dec 08 21:41:55 crc kubenswrapper[4791]: I1208 21:41:55.367439 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/46c514cc-472d-4668-8ba5-d7c31377af02-var-run-ovn\") pod \"ovn-controller-g2zr4-config-cntwx\" (UID: \"46c514cc-472d-4668-8ba5-d7c31377af02\") " pod="openstack/ovn-controller-g2zr4-config-cntwx" Dec 08 21:41:55 crc kubenswrapper[4791]: I1208 21:41:55.368015 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/46c514cc-472d-4668-8ba5-d7c31377af02-additional-scripts\") pod \"ovn-controller-g2zr4-config-cntwx\" (UID: \"46c514cc-472d-4668-8ba5-d7c31377af02\") " pod="openstack/ovn-controller-g2zr4-config-cntwx" Dec 08 21:41:55 crc kubenswrapper[4791]: I1208 21:41:55.368680 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/46c514cc-472d-4668-8ba5-d7c31377af02-scripts\") pod 
\"ovn-controller-g2zr4-config-cntwx\" (UID: \"46c514cc-472d-4668-8ba5-d7c31377af02\") " pod="openstack/ovn-controller-g2zr4-config-cntwx" Dec 08 21:41:55 crc kubenswrapper[4791]: I1208 21:41:55.386846 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fcw9n\" (UniqueName: \"kubernetes.io/projected/46c514cc-472d-4668-8ba5-d7c31377af02-kube-api-access-fcw9n\") pod \"ovn-controller-g2zr4-config-cntwx\" (UID: \"46c514cc-472d-4668-8ba5-d7c31377af02\") " pod="openstack/ovn-controller-g2zr4-config-cntwx" Dec 08 21:41:55 crc kubenswrapper[4791]: I1208 21:41:55.459175 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9","Type":"ContainerStarted","Data":"f27dfd18dec9e471d90de76deace2295df48590aaba288f1138335623f68584b"} Dec 08 21:41:55 crc kubenswrapper[4791]: I1208 21:41:55.459421 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 08 21:41:55 crc kubenswrapper[4791]: I1208 21:41:55.503773 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-g2zr4-config-cntwx" Dec 08 21:41:55 crc kubenswrapper[4791]: I1208 21:41:55.614795 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="95730417-3c85-44a0-9a95-8fefeb495a03" path="/var/lib/kubelet/pods/95730417-3c85-44a0-9a95-8fefeb495a03/volumes" Dec 08 21:41:56 crc kubenswrapper[4791]: I1208 21:41:56.012582 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=-9223371959.842215 podStartE2EDuration="1m17.012561787s" podCreationTimestamp="2025-12-08 21:40:39 +0000 UTC" firstStartedPulling="2025-12-08 21:40:42.514261041 +0000 UTC m=+1319.213019386" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:41:55.50994674 +0000 UTC m=+1392.208705105" watchObservedRunningTime="2025-12-08 21:41:56.012561787 +0000 UTC m=+1392.711320132" Dec 08 21:41:56 crc kubenswrapper[4791]: I1208 21:41:56.016377 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-g2zr4-config-cntwx"] Dec 08 21:41:56 crc kubenswrapper[4791]: I1208 21:41:56.477675 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-g2zr4-config-cntwx" event={"ID":"46c514cc-472d-4668-8ba5-d7c31377af02","Type":"ContainerStarted","Data":"8c065d659584282082b219bec1e800a937d6e54e4448977fe6cd1ee556e5e891"} Dec 08 21:41:56 crc kubenswrapper[4791]: I1208 21:41:56.478312 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-g2zr4-config-cntwx" event={"ID":"46c514cc-472d-4668-8ba5-d7c31377af02","Type":"ContainerStarted","Data":"2dbb2b7873b9823e552b55be497efc13ac48e3ebb95de52885b1d1618887ed27"} Dec 08 21:41:56 crc kubenswrapper[4791]: I1208 21:41:56.503200 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-g2zr4-config-cntwx" podStartSLOduration=1.5031710679999999 podStartE2EDuration="1.503171068s" podCreationTimestamp="2025-12-08 21:41:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:41:56.497838747 +0000 UTC m=+1393.196597112" watchObservedRunningTime="2025-12-08 21:41:56.503171068 +0000 UTC m=+1393.201929413" Dec 08 21:41:57 crc kubenswrapper[4791]: I1208 21:41:57.491228 4791 generic.go:334] "Generic (PLEG): container 
finished" podID="46c514cc-472d-4668-8ba5-d7c31377af02" containerID="8c065d659584282082b219bec1e800a937d6e54e4448977fe6cd1ee556e5e891" exitCode=0 Dec 08 21:41:57 crc kubenswrapper[4791]: I1208 21:41:57.491306 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-g2zr4-config-cntwx" event={"ID":"46c514cc-472d-4668-8ba5-d7c31377af02","Type":"ContainerDied","Data":"8c065d659584282082b219bec1e800a937d6e54e4448977fe6cd1ee556e5e891"} Dec 08 21:41:57 crc kubenswrapper[4791]: I1208 21:41:57.493325 4791 generic.go:334] "Generic (PLEG): container finished" podID="bcd8d669-4a40-401d-af99-651b840fb48b" containerID="fa46debf45bbc591d3f3d2ff279b0f4ae741603ae5627a9244c1052ab274ef8a" exitCode=1 Dec 08 21:41:57 crc kubenswrapper[4791]: I1208 21:41:57.493368 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" event={"ID":"bcd8d669-4a40-401d-af99-651b840fb48b","Type":"ContainerDied","Data":"fa46debf45bbc591d3f3d2ff279b0f4ae741603ae5627a9244c1052ab274ef8a"} Dec 08 21:41:57 crc kubenswrapper[4791]: I1208 21:41:57.494052 4791 scope.go:117] "RemoveContainer" containerID="fa46debf45bbc591d3f3d2ff279b0f4ae741603ae5627a9244c1052ab274ef8a" Dec 08 21:41:58 crc kubenswrapper[4791]: I1208 21:41:58.504927 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" event={"ID":"bcd8d669-4a40-401d-af99-651b840fb48b","Type":"ContainerStarted","Data":"6aaa3b083eed387233afd8e27c4bf5a9ae6d9cb075ffa437c67d681e065ad29e"} Dec 08 21:41:58 crc kubenswrapper[4791]: I1208 21:41:58.508103 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 21:41:58 crc kubenswrapper[4791]: I1208 21:41:58.952933 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-g2zr4-config-cntwx" Dec 08 21:41:59 crc kubenswrapper[4791]: I1208 21:41:59.066642 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcw9n\" (UniqueName: \"kubernetes.io/projected/46c514cc-472d-4668-8ba5-d7c31377af02-kube-api-access-fcw9n\") pod \"46c514cc-472d-4668-8ba5-d7c31377af02\" (UID: \"46c514cc-472d-4668-8ba5-d7c31377af02\") " Dec 08 21:41:59 crc kubenswrapper[4791]: I1208 21:41:59.067050 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/46c514cc-472d-4668-8ba5-d7c31377af02-var-run\") pod \"46c514cc-472d-4668-8ba5-d7c31377af02\" (UID: \"46c514cc-472d-4668-8ba5-d7c31377af02\") " Dec 08 21:41:59 crc kubenswrapper[4791]: I1208 21:41:59.067141 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/46c514cc-472d-4668-8ba5-d7c31377af02-var-run-ovn\") pod \"46c514cc-472d-4668-8ba5-d7c31377af02\" (UID: \"46c514cc-472d-4668-8ba5-d7c31377af02\") " Dec 08 21:41:59 crc kubenswrapper[4791]: I1208 21:41:59.067230 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/46c514cc-472d-4668-8ba5-d7c31377af02-var-run" (OuterVolumeSpecName: "var-run") pod "46c514cc-472d-4668-8ba5-d7c31377af02" (UID: "46c514cc-472d-4668-8ba5-d7c31377af02"). InnerVolumeSpecName "var-run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:41:59 crc kubenswrapper[4791]: I1208 21:41:59.067325 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/46c514cc-472d-4668-8ba5-d7c31377af02-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "46c514cc-472d-4668-8ba5-d7c31377af02" (UID: "46c514cc-472d-4668-8ba5-d7c31377af02"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:41:59 crc kubenswrapper[4791]: I1208 21:41:59.067291 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/46c514cc-472d-4668-8ba5-d7c31377af02-var-log-ovn\") pod \"46c514cc-472d-4668-8ba5-d7c31377af02\" (UID: \"46c514cc-472d-4668-8ba5-d7c31377af02\") " Dec 08 21:41:59 crc kubenswrapper[4791]: I1208 21:41:59.067327 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/46c514cc-472d-4668-8ba5-d7c31377af02-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "46c514cc-472d-4668-8ba5-d7c31377af02" (UID: "46c514cc-472d-4668-8ba5-d7c31377af02"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:41:59 crc kubenswrapper[4791]: I1208 21:41:59.067533 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/46c514cc-472d-4668-8ba5-d7c31377af02-scripts\") pod \"46c514cc-472d-4668-8ba5-d7c31377af02\" (UID: \"46c514cc-472d-4668-8ba5-d7c31377af02\") " Dec 08 21:41:59 crc kubenswrapper[4791]: I1208 21:41:59.067584 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/46c514cc-472d-4668-8ba5-d7c31377af02-additional-scripts\") pod \"46c514cc-472d-4668-8ba5-d7c31377af02\" (UID: \"46c514cc-472d-4668-8ba5-d7c31377af02\") " Dec 08 21:41:59 crc kubenswrapper[4791]: I1208 21:41:59.068330 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46c514cc-472d-4668-8ba5-d7c31377af02-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "46c514cc-472d-4668-8ba5-d7c31377af02" (UID: "46c514cc-472d-4668-8ba5-d7c31377af02"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:41:59 crc kubenswrapper[4791]: I1208 21:41:59.068523 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46c514cc-472d-4668-8ba5-d7c31377af02-scripts" (OuterVolumeSpecName: "scripts") pod "46c514cc-472d-4668-8ba5-d7c31377af02" (UID: "46c514cc-472d-4668-8ba5-d7c31377af02"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:41:59 crc kubenswrapper[4791]: I1208 21:41:59.068711 4791 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/46c514cc-472d-4668-8ba5-d7c31377af02-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:59 crc kubenswrapper[4791]: I1208 21:41:59.068755 4791 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/46c514cc-472d-4668-8ba5-d7c31377af02-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:59 crc kubenswrapper[4791]: I1208 21:41:59.068768 4791 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/46c514cc-472d-4668-8ba5-d7c31377af02-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:59 crc kubenswrapper[4791]: I1208 21:41:59.068786 4791 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/46c514cc-472d-4668-8ba5-d7c31377af02-var-run\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:59 crc kubenswrapper[4791]: I1208 21:41:59.068797 4791 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/46c514cc-472d-4668-8ba5-d7c31377af02-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:59 crc kubenswrapper[4791]: I1208 21:41:59.088686 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46c514cc-472d-4668-8ba5-d7c31377af02-kube-api-access-fcw9n" (OuterVolumeSpecName: "kube-api-access-fcw9n") pod "46c514cc-472d-4668-8ba5-d7c31377af02" (UID: "46c514cc-472d-4668-8ba5-d7c31377af02"). InnerVolumeSpecName "kube-api-access-fcw9n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:41:59 crc kubenswrapper[4791]: I1208 21:41:59.170350 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcw9n\" (UniqueName: \"kubernetes.io/projected/46c514cc-472d-4668-8ba5-d7c31377af02-kube-api-access-fcw9n\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:59 crc kubenswrapper[4791]: I1208 21:41:59.519963 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-g2zr4-config-cntwx" Dec 08 21:41:59 crc kubenswrapper[4791]: I1208 21:41:59.519952 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-g2zr4-config-cntwx" event={"ID":"46c514cc-472d-4668-8ba5-d7c31377af02","Type":"ContainerDied","Data":"2dbb2b7873b9823e552b55be497efc13ac48e3ebb95de52885b1d1618887ed27"} Dec 08 21:41:59 crc kubenswrapper[4791]: I1208 21:41:59.520009 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2dbb2b7873b9823e552b55be497efc13ac48e3ebb95de52885b1d1618887ed27" Dec 08 21:41:59 crc kubenswrapper[4791]: I1208 21:41:59.625740 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-g2zr4-config-cntwx"] Dec 08 21:41:59 crc kubenswrapper[4791]: I1208 21:41:59.636360 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-g2zr4-config-cntwx"] Dec 08 21:41:59 crc kubenswrapper[4791]: I1208 21:41:59.845553 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-g2zr4" Dec 08 21:41:59 crc kubenswrapper[4791]: I1208 21:41:59.884634 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/7f7de8af-60f4-4571-bc47-95cb97ce0121-etc-swift\") pod \"swift-storage-0\" (UID: \"7f7de8af-60f4-4571-bc47-95cb97ce0121\") " pod="openstack/swift-storage-0" Dec 08 21:41:59 crc kubenswrapper[4791]: I1208 21:41:59.892775 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/7f7de8af-60f4-4571-bc47-95cb97ce0121-etc-swift\") pod \"swift-storage-0\" (UID: \"7f7de8af-60f4-4571-bc47-95cb97ce0121\") " pod="openstack/swift-storage-0" Dec 08 21:42:00 crc kubenswrapper[4791]: I1208 21:42:00.088947 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Dec 08 21:42:01 crc kubenswrapper[4791]: I1208 21:42:01.238586 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-2" podUID="87530e07-a720-4b5f-bd6f-c3f8bb540453" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.129:5671: connect: connection refused" Dec 08 21:42:01 crc kubenswrapper[4791]: I1208 21:42:01.589925 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 08 21:42:01 crc kubenswrapper[4791]: I1208 21:42:01.634594 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="46c514cc-472d-4668-8ba5-d7c31377af02" path="/var/lib/kubelet/pods/46c514cc-472d-4668-8ba5-d7c31377af02/volumes" Dec 08 21:42:05 crc kubenswrapper[4791]: I1208 21:42:05.251882 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:42:05 crc kubenswrapper[4791]: I1208 21:42:05.252450 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:42:05 crc kubenswrapper[4791]: I1208 21:42:05.252495 4791 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" Dec 08 21:42:05 crc kubenswrapper[4791]: I1208 21:42:05.253317 4791 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"294a71027908b890218887589647a3ead6eb10efa3b336cb826e5a4ab73343c7"} pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 08 21:42:05 crc kubenswrapper[4791]: I1208 21:42:05.253404 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" containerID="cri-o://294a71027908b890218887589647a3ead6eb10efa3b336cb826e5a4ab73343c7" gracePeriod=600 Dec 08 21:42:05 crc kubenswrapper[4791]: I1208 21:42:05.602181 4791 generic.go:334] "Generic (PLEG): container finished" podID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerID="294a71027908b890218887589647a3ead6eb10efa3b336cb826e5a4ab73343c7" exitCode=0 Dec 08 21:42:05 crc kubenswrapper[4791]: I1208 21:42:05.612043 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" event={"ID":"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d","Type":"ContainerDied","Data":"294a71027908b890218887589647a3ead6eb10efa3b336cb826e5a4ab73343c7"} Dec 08 21:42:05 crc kubenswrapper[4791]: I1208 21:42:05.612098 4791 scope.go:117] "RemoveContainer" containerID="2c818dc85098516c004b18fbf01aa57ea0f0a817f59523978cf0e86c2b78304e" Dec 08 21:42:05 crc kubenswrapper[4791]: I1208 21:42:05.885780 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 21:42:08 crc kubenswrapper[4791]: E1208 21:42:08.718342 4791 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-glance-api:current-podified" Dec 08 21:42:08 crc kubenswrapper[4791]: E1208 21:42:08.718984 4791 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:glance-db-sync,Image:quay.io/podified-antelope-centos9/openstack-glance-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/glance/glance.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zdvfx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42415,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42415,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-db-sync-rrtnb_openstack(e74124c9-f94a-4168-a9b1-dafbcb9e0f70): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 08 21:42:08 crc kubenswrapper[4791]: E1208 21:42:08.720351 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/glance-db-sync-rrtnb" podUID="e74124c9-f94a-4168-a9b1-dafbcb9e0f70" Dec 08 21:42:09 crc kubenswrapper[4791]: W1208 21:42:09.269695 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7f7de8af_60f4_4571_bc47_95cb97ce0121.slice/crio-2b497ef8e681e602b32b41a7f62b2079766fa0c34cee82a0d29e974ff3c2e941 WatchSource:0}: Error finding container 2b497ef8e681e602b32b41a7f62b2079766fa0c34cee82a0d29e974ff3c2e941: Status 404 returned error can't find the container with id 
2b497ef8e681e602b32b41a7f62b2079766fa0c34cee82a0d29e974ff3c2e941 Dec 08 21:42:09 crc kubenswrapper[4791]: I1208 21:42:09.278179 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 08 21:42:09 crc kubenswrapper[4791]: I1208 21:42:09.664919 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" event={"ID":"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d","Type":"ContainerStarted","Data":"378e3a5a4f2c0073c5af7e3a66a8b9357a6d3b10e58f0a7f472dd39ed5f18e43"} Dec 08 21:42:09 crc kubenswrapper[4791]: I1208 21:42:09.669458 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"7f7de8af-60f4-4571-bc47-95cb97ce0121","Type":"ContainerStarted","Data":"2b497ef8e681e602b32b41a7f62b2079766fa0c34cee82a0d29e974ff3c2e941"} Dec 08 21:42:09 crc kubenswrapper[4791]: E1208 21:42:09.671402 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-glance-api:current-podified\\\"\"" pod="openstack/glance-db-sync-rrtnb" podUID="e74124c9-f94a-4168-a9b1-dafbcb9e0f70" Dec 08 21:42:10 crc kubenswrapper[4791]: I1208 21:42:10.687756 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"7f7de8af-60f4-4571-bc47-95cb97ce0121","Type":"ContainerStarted","Data":"e292d68b6e2418258623382a4fd7b7354b42530220896904be84ed86d2ad8c0b"} Dec 08 21:42:10 crc kubenswrapper[4791]: I1208 21:42:10.688509 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"7f7de8af-60f4-4571-bc47-95cb97ce0121","Type":"ContainerStarted","Data":"84f43ebffc62a93ba4e041f830abe099bea2f1080a7b583b41e4d249e49ef9c7"} Dec 08 21:42:11 crc kubenswrapper[4791]: I1208 21:42:11.209013 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-1" podUID="6101a045-4b01-484e-a65b-4c406e458ea1" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.128:5671: connect: connection refused" Dec 08 21:42:11 crc kubenswrapper[4791]: I1208 21:42:11.238040 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-2" Dec 08 21:42:11 crc kubenswrapper[4791]: I1208 21:42:11.474413 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 08 21:42:11 crc kubenswrapper[4791]: I1208 21:42:11.703207 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"7f7de8af-60f4-4571-bc47-95cb97ce0121","Type":"ContainerStarted","Data":"eeef957b4f50f1bfc9f375552f892384db4a7d10bde96701481bf2465e0c86ff"} Dec 08 21:42:11 crc kubenswrapper[4791]: I1208 21:42:11.703270 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"7f7de8af-60f4-4571-bc47-95cb97ce0121","Type":"ContainerStarted","Data":"07adeeac672bd2a1b959dc6856b5561bd51dabb1ccfc3daf5c1d3844a0d711be"} Dec 08 21:42:16 crc kubenswrapper[4791]: I1208 21:42:16.782098 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"7f7de8af-60f4-4571-bc47-95cb97ce0121","Type":"ContainerStarted","Data":"c026567879c7193bac164994b68db7167c23e8eacc0a8c5cf32596c1a47f81cc"} Dec 08 21:42:16 crc kubenswrapper[4791]: I1208 21:42:16.782778 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/swift-storage-0" event={"ID":"7f7de8af-60f4-4571-bc47-95cb97ce0121","Type":"ContainerStarted","Data":"98251cb0a05da4b934167e12af074bc3d34857e2401fda5ea9541c79f650e0ea"} Dec 08 21:42:16 crc kubenswrapper[4791]: I1208 21:42:16.782864 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"7f7de8af-60f4-4571-bc47-95cb97ce0121","Type":"ContainerStarted","Data":"6819cb00a3b6597affcb58723b830166c095a7df34a0e003f9b80cba709ecc10"} Dec 08 21:42:16 crc kubenswrapper[4791]: I1208 21:42:16.782966 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"7f7de8af-60f4-4571-bc47-95cb97ce0121","Type":"ContainerStarted","Data":"05125a6c20e313d294170f46620d806be229b69132739c5d27abefb1f7bad146"} Dec 08 21:42:19 crc kubenswrapper[4791]: I1208 21:42:19.120871 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"7f7de8af-60f4-4571-bc47-95cb97ce0121","Type":"ContainerStarted","Data":"1699c8b88dd21829fe62a27f6ba2cf7910f790a6b8e7e627f1ccfe2648d157a2"} Dec 08 21:42:19 crc kubenswrapper[4791]: I1208 21:42:19.121365 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"7f7de8af-60f4-4571-bc47-95cb97ce0121","Type":"ContainerStarted","Data":"5b8e7afe469143e2287050cd8291cd2d7b4442213de84010b4c234adda08b470"} Dec 08 21:42:20 crc kubenswrapper[4791]: I1208 21:42:20.143656 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"7f7de8af-60f4-4571-bc47-95cb97ce0121","Type":"ContainerStarted","Data":"13f3e667f06515ba0992f145d2eefb8c5999b2cb3f4c96f34cbebc2ee8d96213"} Dec 08 21:42:20 crc kubenswrapper[4791]: I1208 21:42:20.144578 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"7f7de8af-60f4-4571-bc47-95cb97ce0121","Type":"ContainerStarted","Data":"f45f545e20c3386d27df474389e64477e9fb61bcd7d9d59ee4f36f8fb7963e1b"} Dec 08 21:42:20 crc kubenswrapper[4791]: I1208 21:42:20.144599 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"7f7de8af-60f4-4571-bc47-95cb97ce0121","Type":"ContainerStarted","Data":"df41cadd2b69fab67d6a97499b817703bc3b84cfa667f77cb75c4c803bbbacbc"} Dec 08 21:42:20 crc kubenswrapper[4791]: I1208 21:42:20.144612 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"7f7de8af-60f4-4571-bc47-95cb97ce0121","Type":"ContainerStarted","Data":"e56505361e79eb8443f54ebc19876208be7103a1ad7e7fbeba59fe296ddc391c"} Dec 08 21:42:20 crc kubenswrapper[4791]: I1208 21:42:20.144624 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"7f7de8af-60f4-4571-bc47-95cb97ce0121","Type":"ContainerStarted","Data":"ee8ac4b293878cba359b7ef07bcc39e830b8287c1db5898202fb4e7b89f2bf2b"} Dec 08 21:42:20 crc kubenswrapper[4791]: I1208 21:42:20.185076 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=44.820225647 podStartE2EDuration="54.185043572s" podCreationTimestamp="2025-12-08 21:41:26 +0000 UTC" firstStartedPulling="2025-12-08 21:42:09.272528216 +0000 UTC m=+1405.971286561" lastFinishedPulling="2025-12-08 21:42:18.637346141 +0000 UTC m=+1415.336104486" observedRunningTime="2025-12-08 21:42:20.173946679 +0000 UTC m=+1416.872705084" watchObservedRunningTime="2025-12-08 21:42:20.185043572 +0000 UTC m=+1416.883801957" Dec 08 21:42:20 crc 
kubenswrapper[4791]: I1208 21:42:20.469914 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c79d794d7-m7h8z"] Dec 08 21:42:20 crc kubenswrapper[4791]: E1208 21:42:20.470451 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46c514cc-472d-4668-8ba5-d7c31377af02" containerName="ovn-config" Dec 08 21:42:20 crc kubenswrapper[4791]: I1208 21:42:20.470471 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="46c514cc-472d-4668-8ba5-d7c31377af02" containerName="ovn-config" Dec 08 21:42:20 crc kubenswrapper[4791]: I1208 21:42:20.470677 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="46c514cc-472d-4668-8ba5-d7c31377af02" containerName="ovn-config" Dec 08 21:42:20 crc kubenswrapper[4791]: I1208 21:42:20.472022 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c79d794d7-m7h8z" Dec 08 21:42:20 crc kubenswrapper[4791]: I1208 21:42:20.475143 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Dec 08 21:42:20 crc kubenswrapper[4791]: I1208 21:42:20.493972 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c79d794d7-m7h8z"] Dec 08 21:42:20 crc kubenswrapper[4791]: I1208 21:42:20.563196 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/aaf43b8d-8ee8-4def-b8c3-8f5596513b2f-dns-swift-storage-0\") pod \"dnsmasq-dns-5c79d794d7-m7h8z\" (UID: \"aaf43b8d-8ee8-4def-b8c3-8f5596513b2f\") " pod="openstack/dnsmasq-dns-5c79d794d7-m7h8z" Dec 08 21:42:20 crc kubenswrapper[4791]: I1208 21:42:20.563509 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fzpqf\" (UniqueName: \"kubernetes.io/projected/aaf43b8d-8ee8-4def-b8c3-8f5596513b2f-kube-api-access-fzpqf\") pod \"dnsmasq-dns-5c79d794d7-m7h8z\" (UID: \"aaf43b8d-8ee8-4def-b8c3-8f5596513b2f\") " pod="openstack/dnsmasq-dns-5c79d794d7-m7h8z" Dec 08 21:42:20 crc kubenswrapper[4791]: I1208 21:42:20.563604 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aaf43b8d-8ee8-4def-b8c3-8f5596513b2f-config\") pod \"dnsmasq-dns-5c79d794d7-m7h8z\" (UID: \"aaf43b8d-8ee8-4def-b8c3-8f5596513b2f\") " pod="openstack/dnsmasq-dns-5c79d794d7-m7h8z" Dec 08 21:42:20 crc kubenswrapper[4791]: I1208 21:42:20.563728 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/aaf43b8d-8ee8-4def-b8c3-8f5596513b2f-dns-svc\") pod \"dnsmasq-dns-5c79d794d7-m7h8z\" (UID: \"aaf43b8d-8ee8-4def-b8c3-8f5596513b2f\") " pod="openstack/dnsmasq-dns-5c79d794d7-m7h8z" Dec 08 21:42:20 crc kubenswrapper[4791]: I1208 21:42:20.563844 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/aaf43b8d-8ee8-4def-b8c3-8f5596513b2f-ovsdbserver-sb\") pod \"dnsmasq-dns-5c79d794d7-m7h8z\" (UID: \"aaf43b8d-8ee8-4def-b8c3-8f5596513b2f\") " pod="openstack/dnsmasq-dns-5c79d794d7-m7h8z" Dec 08 21:42:20 crc kubenswrapper[4791]: I1208 21:42:20.564069 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/aaf43b8d-8ee8-4def-b8c3-8f5596513b2f-ovsdbserver-nb\") pod 
\"dnsmasq-dns-5c79d794d7-m7h8z\" (UID: \"aaf43b8d-8ee8-4def-b8c3-8f5596513b2f\") " pod="openstack/dnsmasq-dns-5c79d794d7-m7h8z" Dec 08 21:42:20 crc kubenswrapper[4791]: I1208 21:42:20.665635 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/aaf43b8d-8ee8-4def-b8c3-8f5596513b2f-ovsdbserver-nb\") pod \"dnsmasq-dns-5c79d794d7-m7h8z\" (UID: \"aaf43b8d-8ee8-4def-b8c3-8f5596513b2f\") " pod="openstack/dnsmasq-dns-5c79d794d7-m7h8z" Dec 08 21:42:20 crc kubenswrapper[4791]: I1208 21:42:20.665759 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/aaf43b8d-8ee8-4def-b8c3-8f5596513b2f-dns-swift-storage-0\") pod \"dnsmasq-dns-5c79d794d7-m7h8z\" (UID: \"aaf43b8d-8ee8-4def-b8c3-8f5596513b2f\") " pod="openstack/dnsmasq-dns-5c79d794d7-m7h8z" Dec 08 21:42:20 crc kubenswrapper[4791]: I1208 21:42:20.665839 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fzpqf\" (UniqueName: \"kubernetes.io/projected/aaf43b8d-8ee8-4def-b8c3-8f5596513b2f-kube-api-access-fzpqf\") pod \"dnsmasq-dns-5c79d794d7-m7h8z\" (UID: \"aaf43b8d-8ee8-4def-b8c3-8f5596513b2f\") " pod="openstack/dnsmasq-dns-5c79d794d7-m7h8z" Dec 08 21:42:20 crc kubenswrapper[4791]: I1208 21:42:20.665880 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aaf43b8d-8ee8-4def-b8c3-8f5596513b2f-config\") pod \"dnsmasq-dns-5c79d794d7-m7h8z\" (UID: \"aaf43b8d-8ee8-4def-b8c3-8f5596513b2f\") " pod="openstack/dnsmasq-dns-5c79d794d7-m7h8z" Dec 08 21:42:20 crc kubenswrapper[4791]: I1208 21:42:20.665938 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/aaf43b8d-8ee8-4def-b8c3-8f5596513b2f-dns-svc\") pod \"dnsmasq-dns-5c79d794d7-m7h8z\" (UID: \"aaf43b8d-8ee8-4def-b8c3-8f5596513b2f\") " pod="openstack/dnsmasq-dns-5c79d794d7-m7h8z" Dec 08 21:42:20 crc kubenswrapper[4791]: I1208 21:42:20.665991 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/aaf43b8d-8ee8-4def-b8c3-8f5596513b2f-ovsdbserver-sb\") pod \"dnsmasq-dns-5c79d794d7-m7h8z\" (UID: \"aaf43b8d-8ee8-4def-b8c3-8f5596513b2f\") " pod="openstack/dnsmasq-dns-5c79d794d7-m7h8z" Dec 08 21:42:20 crc kubenswrapper[4791]: I1208 21:42:20.667504 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/aaf43b8d-8ee8-4def-b8c3-8f5596513b2f-ovsdbserver-sb\") pod \"dnsmasq-dns-5c79d794d7-m7h8z\" (UID: \"aaf43b8d-8ee8-4def-b8c3-8f5596513b2f\") " pod="openstack/dnsmasq-dns-5c79d794d7-m7h8z" Dec 08 21:42:20 crc kubenswrapper[4791]: I1208 21:42:20.667514 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/aaf43b8d-8ee8-4def-b8c3-8f5596513b2f-dns-svc\") pod \"dnsmasq-dns-5c79d794d7-m7h8z\" (UID: \"aaf43b8d-8ee8-4def-b8c3-8f5596513b2f\") " pod="openstack/dnsmasq-dns-5c79d794d7-m7h8z" Dec 08 21:42:20 crc kubenswrapper[4791]: I1208 21:42:20.667550 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/aaf43b8d-8ee8-4def-b8c3-8f5596513b2f-dns-swift-storage-0\") pod \"dnsmasq-dns-5c79d794d7-m7h8z\" (UID: \"aaf43b8d-8ee8-4def-b8c3-8f5596513b2f\") " 
pod="openstack/dnsmasq-dns-5c79d794d7-m7h8z" Dec 08 21:42:20 crc kubenswrapper[4791]: I1208 21:42:20.667563 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aaf43b8d-8ee8-4def-b8c3-8f5596513b2f-config\") pod \"dnsmasq-dns-5c79d794d7-m7h8z\" (UID: \"aaf43b8d-8ee8-4def-b8c3-8f5596513b2f\") " pod="openstack/dnsmasq-dns-5c79d794d7-m7h8z" Dec 08 21:42:20 crc kubenswrapper[4791]: I1208 21:42:20.667983 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/aaf43b8d-8ee8-4def-b8c3-8f5596513b2f-ovsdbserver-nb\") pod \"dnsmasq-dns-5c79d794d7-m7h8z\" (UID: \"aaf43b8d-8ee8-4def-b8c3-8f5596513b2f\") " pod="openstack/dnsmasq-dns-5c79d794d7-m7h8z" Dec 08 21:42:20 crc kubenswrapper[4791]: I1208 21:42:20.685305 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fzpqf\" (UniqueName: \"kubernetes.io/projected/aaf43b8d-8ee8-4def-b8c3-8f5596513b2f-kube-api-access-fzpqf\") pod \"dnsmasq-dns-5c79d794d7-m7h8z\" (UID: \"aaf43b8d-8ee8-4def-b8c3-8f5596513b2f\") " pod="openstack/dnsmasq-dns-5c79d794d7-m7h8z" Dec 08 21:42:20 crc kubenswrapper[4791]: I1208 21:42:20.788936 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c79d794d7-m7h8z" Dec 08 21:42:21 crc kubenswrapper[4791]: I1208 21:42:21.207873 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-1" Dec 08 21:42:21 crc kubenswrapper[4791]: I1208 21:42:21.272808 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c79d794d7-m7h8z"] Dec 08 21:42:21 crc kubenswrapper[4791]: I1208 21:42:21.638212 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-d2jht"] Dec 08 21:42:21 crc kubenswrapper[4791]: I1208 21:42:21.640214 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-d2jht" Dec 08 21:42:21 crc kubenswrapper[4791]: I1208 21:42:21.663655 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-d2jht"] Dec 08 21:42:21 crc kubenswrapper[4791]: I1208 21:42:21.680062 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-32a5-account-create-update-vpbkq"] Dec 08 21:42:21 crc kubenswrapper[4791]: I1208 21:42:21.681785 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-32a5-account-create-update-vpbkq" Dec 08 21:42:21 crc kubenswrapper[4791]: I1208 21:42:21.683816 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Dec 08 21:42:21 crc kubenswrapper[4791]: I1208 21:42:21.690067 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6fptk\" (UniqueName: \"kubernetes.io/projected/83b4f5fa-551e-4849-baf6-7afb53700f1d-kube-api-access-6fptk\") pod \"cinder-db-create-d2jht\" (UID: \"83b4f5fa-551e-4849-baf6-7afb53700f1d\") " pod="openstack/cinder-db-create-d2jht" Dec 08 21:42:21 crc kubenswrapper[4791]: I1208 21:42:21.690399 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/83b4f5fa-551e-4849-baf6-7afb53700f1d-operator-scripts\") pod \"cinder-db-create-d2jht\" (UID: \"83b4f5fa-551e-4849-baf6-7afb53700f1d\") " pod="openstack/cinder-db-create-d2jht" Dec 08 21:42:21 crc kubenswrapper[4791]: I1208 21:42:21.727349 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-32a5-account-create-update-vpbkq"] Dec 08 21:42:21 crc kubenswrapper[4791]: I1208 21:42:21.763756 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-5rt7m"] Dec 08 21:42:21 crc kubenswrapper[4791]: I1208 21:42:21.765676 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-5rt7m" Dec 08 21:42:21 crc kubenswrapper[4791]: I1208 21:42:21.784602 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-0f49-account-create-update-bxt2q"] Dec 08 21:42:21 crc kubenswrapper[4791]: I1208 21:42:21.786499 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-0f49-account-create-update-bxt2q" Dec 08 21:42:21 crc kubenswrapper[4791]: I1208 21:42:21.792295 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b317f179-52ca-4d94-bd3a-c9cfd5096839-operator-scripts\") pod \"barbican-32a5-account-create-update-vpbkq\" (UID: \"b317f179-52ca-4d94-bd3a-c9cfd5096839\") " pod="openstack/barbican-32a5-account-create-update-vpbkq" Dec 08 21:42:21 crc kubenswrapper[4791]: I1208 21:42:21.792412 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/83b4f5fa-551e-4849-baf6-7afb53700f1d-operator-scripts\") pod \"cinder-db-create-d2jht\" (UID: \"83b4f5fa-551e-4849-baf6-7afb53700f1d\") " pod="openstack/cinder-db-create-d2jht" Dec 08 21:42:21 crc kubenswrapper[4791]: I1208 21:42:21.792508 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-68k6t\" (UniqueName: \"kubernetes.io/projected/b317f179-52ca-4d94-bd3a-c9cfd5096839-kube-api-access-68k6t\") pod \"barbican-32a5-account-create-update-vpbkq\" (UID: \"b317f179-52ca-4d94-bd3a-c9cfd5096839\") " pod="openstack/barbican-32a5-account-create-update-vpbkq" Dec 08 21:42:21 crc kubenswrapper[4791]: I1208 21:42:21.792544 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6fptk\" (UniqueName: \"kubernetes.io/projected/83b4f5fa-551e-4849-baf6-7afb53700f1d-kube-api-access-6fptk\") pod \"cinder-db-create-d2jht\" (UID: \"83b4f5fa-551e-4849-baf6-7afb53700f1d\") " pod="openstack/cinder-db-create-d2jht" Dec 08 21:42:21 crc kubenswrapper[4791]: I1208 21:42:21.793583 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/83b4f5fa-551e-4849-baf6-7afb53700f1d-operator-scripts\") pod \"cinder-db-create-d2jht\" (UID: \"83b4f5fa-551e-4849-baf6-7afb53700f1d\") " pod="openstack/cinder-db-create-d2jht" Dec 08 21:42:21 crc kubenswrapper[4791]: I1208 21:42:21.799467 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-db-secret" Dec 08 21:42:21 crc kubenswrapper[4791]: I1208 21:42:21.821858 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-5rt7m"] Dec 08 21:42:21 crc kubenswrapper[4791]: I1208 21:42:21.856571 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6fptk\" (UniqueName: \"kubernetes.io/projected/83b4f5fa-551e-4849-baf6-7afb53700f1d-kube-api-access-6fptk\") pod \"cinder-db-create-d2jht\" (UID: \"83b4f5fa-551e-4849-baf6-7afb53700f1d\") " pod="openstack/cinder-db-create-d2jht" Dec 08 21:42:21 crc kubenswrapper[4791]: I1208 21:42:21.869580 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-0f49-account-create-update-bxt2q"] Dec 08 21:42:21 crc kubenswrapper[4791]: I1208 21:42:21.901813 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/55f1c8c7-116e-4f7f-9b3e-b94c44b5a755-operator-scripts\") pod \"heat-0f49-account-create-update-bxt2q\" (UID: \"55f1c8c7-116e-4f7f-9b3e-b94c44b5a755\") " pod="openstack/heat-0f49-account-create-update-bxt2q" Dec 08 21:42:21 crc kubenswrapper[4791]: I1208 21:42:21.905976 4791 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-srwk7\" (UniqueName: \"kubernetes.io/projected/ceb03ddf-9eca-4760-82da-ef871c8f2af7-kube-api-access-srwk7\") pod \"barbican-db-create-5rt7m\" (UID: \"ceb03ddf-9eca-4760-82da-ef871c8f2af7\") " pod="openstack/barbican-db-create-5rt7m" Dec 08 21:42:21 crc kubenswrapper[4791]: I1208 21:42:21.906774 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-68k6t\" (UniqueName: \"kubernetes.io/projected/b317f179-52ca-4d94-bd3a-c9cfd5096839-kube-api-access-68k6t\") pod \"barbican-32a5-account-create-update-vpbkq\" (UID: \"b317f179-52ca-4d94-bd3a-c9cfd5096839\") " pod="openstack/barbican-32a5-account-create-update-vpbkq" Dec 08 21:42:21 crc kubenswrapper[4791]: I1208 21:42:21.907040 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ceb03ddf-9eca-4760-82da-ef871c8f2af7-operator-scripts\") pod \"barbican-db-create-5rt7m\" (UID: \"ceb03ddf-9eca-4760-82da-ef871c8f2af7\") " pod="openstack/barbican-db-create-5rt7m" Dec 08 21:42:21 crc kubenswrapper[4791]: I1208 21:42:21.907227 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rgbm\" (UniqueName: \"kubernetes.io/projected/55f1c8c7-116e-4f7f-9b3e-b94c44b5a755-kube-api-access-9rgbm\") pod \"heat-0f49-account-create-update-bxt2q\" (UID: \"55f1c8c7-116e-4f7f-9b3e-b94c44b5a755\") " pod="openstack/heat-0f49-account-create-update-bxt2q" Dec 08 21:42:21 crc kubenswrapper[4791]: I1208 21:42:21.907350 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b317f179-52ca-4d94-bd3a-c9cfd5096839-operator-scripts\") pod \"barbican-32a5-account-create-update-vpbkq\" (UID: \"b317f179-52ca-4d94-bd3a-c9cfd5096839\") " pod="openstack/barbican-32a5-account-create-update-vpbkq" Dec 08 21:42:21 crc kubenswrapper[4791]: I1208 21:42:21.908446 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b317f179-52ca-4d94-bd3a-c9cfd5096839-operator-scripts\") pod \"barbican-32a5-account-create-update-vpbkq\" (UID: \"b317f179-52ca-4d94-bd3a-c9cfd5096839\") " pod="openstack/barbican-32a5-account-create-update-vpbkq" Dec 08 21:42:21 crc kubenswrapper[4791]: I1208 21:42:21.928356 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-db-create-rdkss"] Dec 08 21:42:21 crc kubenswrapper[4791]: I1208 21:42:21.930736 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-create-rdkss" Dec 08 21:42:21 crc kubenswrapper[4791]: I1208 21:42:21.942699 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-68k6t\" (UniqueName: \"kubernetes.io/projected/b317f179-52ca-4d94-bd3a-c9cfd5096839-kube-api-access-68k6t\") pod \"barbican-32a5-account-create-update-vpbkq\" (UID: \"b317f179-52ca-4d94-bd3a-c9cfd5096839\") " pod="openstack/barbican-32a5-account-create-update-vpbkq" Dec 08 21:42:21 crc kubenswrapper[4791]: I1208 21:42:21.956460 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-create-rdkss"] Dec 08 21:42:21 crc kubenswrapper[4791]: I1208 21:42:21.969193 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-5ad9-account-create-update-p2wjh"] Dec 08 21:42:21 crc kubenswrapper[4791]: I1208 21:42:21.973326 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-5ad9-account-create-update-p2wjh" Dec 08 21:42:21 crc kubenswrapper[4791]: I1208 21:42:21.976160 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Dec 08 21:42:21 crc kubenswrapper[4791]: I1208 21:42:21.979277 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-5ad9-account-create-update-p2wjh"] Dec 08 21:42:21 crc kubenswrapper[4791]: I1208 21:42:21.993850 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-xkfgv"] Dec 08 21:42:21 crc kubenswrapper[4791]: I1208 21:42:21.995381 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-xkfgv" Dec 08 21:42:21 crc kubenswrapper[4791]: I1208 21:42:21.997577 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 08 21:42:21 crc kubenswrapper[4791]: I1208 21:42:21.998034 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 08 21:42:21 crc kubenswrapper[4791]: I1208 21:42:21.998046 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 08 21:42:21 crc kubenswrapper[4791]: I1208 21:42:21.998221 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-kwdwq" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.008069 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-xkfgv"] Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.009515 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ceb03ddf-9eca-4760-82da-ef871c8f2af7-operator-scripts\") pod \"barbican-db-create-5rt7m\" (UID: \"ceb03ddf-9eca-4760-82da-ef871c8f2af7\") " pod="openstack/barbican-db-create-5rt7m" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.009572 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9rgbm\" (UniqueName: \"kubernetes.io/projected/55f1c8c7-116e-4f7f-9b3e-b94c44b5a755-kube-api-access-9rgbm\") pod \"heat-0f49-account-create-update-bxt2q\" (UID: \"55f1c8c7-116e-4f7f-9b3e-b94c44b5a755\") " pod="openstack/heat-0f49-account-create-update-bxt2q" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.009609 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/e35cda62-b9bc-4055-b831-2f8beb709d69-operator-scripts\") pod \"heat-db-create-rdkss\" (UID: \"e35cda62-b9bc-4055-b831-2f8beb709d69\") " pod="openstack/heat-db-create-rdkss" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.009631 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fj28l\" (UniqueName: \"kubernetes.io/projected/e35cda62-b9bc-4055-b831-2f8beb709d69-kube-api-access-fj28l\") pod \"heat-db-create-rdkss\" (UID: \"e35cda62-b9bc-4055-b831-2f8beb709d69\") " pod="openstack/heat-db-create-rdkss" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.009667 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/55f1c8c7-116e-4f7f-9b3e-b94c44b5a755-operator-scripts\") pod \"heat-0f49-account-create-update-bxt2q\" (UID: \"55f1c8c7-116e-4f7f-9b3e-b94c44b5a755\") " pod="openstack/heat-0f49-account-create-update-bxt2q" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.009757 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-srwk7\" (UniqueName: \"kubernetes.io/projected/ceb03ddf-9eca-4760-82da-ef871c8f2af7-kube-api-access-srwk7\") pod \"barbican-db-create-5rt7m\" (UID: \"ceb03ddf-9eca-4760-82da-ef871c8f2af7\") " pod="openstack/barbican-db-create-5rt7m" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.010449 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ceb03ddf-9eca-4760-82da-ef871c8f2af7-operator-scripts\") pod \"barbican-db-create-5rt7m\" (UID: \"ceb03ddf-9eca-4760-82da-ef871c8f2af7\") " pod="openstack/barbican-db-create-5rt7m" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.012930 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/55f1c8c7-116e-4f7f-9b3e-b94c44b5a755-operator-scripts\") pod \"heat-0f49-account-create-update-bxt2q\" (UID: \"55f1c8c7-116e-4f7f-9b3e-b94c44b5a755\") " pod="openstack/heat-0f49-account-create-update-bxt2q" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.032474 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9rgbm\" (UniqueName: \"kubernetes.io/projected/55f1c8c7-116e-4f7f-9b3e-b94c44b5a755-kube-api-access-9rgbm\") pod \"heat-0f49-account-create-update-bxt2q\" (UID: \"55f1c8c7-116e-4f7f-9b3e-b94c44b5a755\") " pod="openstack/heat-0f49-account-create-update-bxt2q" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.036197 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-srwk7\" (UniqueName: \"kubernetes.io/projected/ceb03ddf-9eca-4760-82da-ef871c8f2af7-kube-api-access-srwk7\") pod \"barbican-db-create-5rt7m\" (UID: \"ceb03ddf-9eca-4760-82da-ef871c8f2af7\") " pod="openstack/barbican-db-create-5rt7m" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.071349 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-d2jht" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.110917 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-32a5-account-create-update-vpbkq" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.112336 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/944249f8-45ae-4247-b092-54a0a081df4e-operator-scripts\") pod \"cinder-5ad9-account-create-update-p2wjh\" (UID: \"944249f8-45ae-4247-b092-54a0a081df4e\") " pod="openstack/cinder-5ad9-account-create-update-p2wjh" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.112381 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5e36e4f-4023-4a5a-9ae3-34cde7af452d-combined-ca-bundle\") pod \"keystone-db-sync-xkfgv\" (UID: \"f5e36e4f-4023-4a5a-9ae3-34cde7af452d\") " pod="openstack/keystone-db-sync-xkfgv" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.112413 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9pfs4\" (UniqueName: \"kubernetes.io/projected/944249f8-45ae-4247-b092-54a0a081df4e-kube-api-access-9pfs4\") pod \"cinder-5ad9-account-create-update-p2wjh\" (UID: \"944249f8-45ae-4247-b092-54a0a081df4e\") " pod="openstack/cinder-5ad9-account-create-update-p2wjh" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.112444 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5e36e4f-4023-4a5a-9ae3-34cde7af452d-config-data\") pod \"keystone-db-sync-xkfgv\" (UID: \"f5e36e4f-4023-4a5a-9ae3-34cde7af452d\") " pod="openstack/keystone-db-sync-xkfgv" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.112555 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e35cda62-b9bc-4055-b831-2f8beb709d69-operator-scripts\") pod \"heat-db-create-rdkss\" (UID: \"e35cda62-b9bc-4055-b831-2f8beb709d69\") " pod="openstack/heat-db-create-rdkss" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.112579 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fj28l\" (UniqueName: \"kubernetes.io/projected/e35cda62-b9bc-4055-b831-2f8beb709d69-kube-api-access-fj28l\") pod \"heat-db-create-rdkss\" (UID: \"e35cda62-b9bc-4055-b831-2f8beb709d69\") " pod="openstack/heat-db-create-rdkss" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.112593 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cbx9z\" (UniqueName: \"kubernetes.io/projected/f5e36e4f-4023-4a5a-9ae3-34cde7af452d-kube-api-access-cbx9z\") pod \"keystone-db-sync-xkfgv\" (UID: \"f5e36e4f-4023-4a5a-9ae3-34cde7af452d\") " pod="openstack/keystone-db-sync-xkfgv" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.113376 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e35cda62-b9bc-4055-b831-2f8beb709d69-operator-scripts\") pod \"heat-db-create-rdkss\" (UID: \"e35cda62-b9bc-4055-b831-2f8beb709d69\") " pod="openstack/heat-db-create-rdkss" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.145672 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-9kxd9"] Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.146990 4791 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openstack/barbican-db-create-5rt7m" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.147396 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-9kxd9" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.157333 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-efc3-account-create-update-hxj9q"] Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.162865 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-efc3-account-create-update-hxj9q" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.165877 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fj28l\" (UniqueName: \"kubernetes.io/projected/e35cda62-b9bc-4055-b831-2f8beb709d69-kube-api-access-fj28l\") pod \"heat-db-create-rdkss\" (UID: \"e35cda62-b9bc-4055-b831-2f8beb709d69\") " pod="openstack/heat-db-create-rdkss" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.171327 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.178316 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-9kxd9"] Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.179463 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-rdkss" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.197818 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-0f49-account-create-update-bxt2q" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.198030 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-efc3-account-create-update-hxj9q"] Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.206066 4791 generic.go:334] "Generic (PLEG): container finished" podID="aaf43b8d-8ee8-4def-b8c3-8f5596513b2f" containerID="6d67514fb4b6616bb01201c4f3c0d886e390f8ca5ba9089400cb30e06685b22a" exitCode=0 Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.206182 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c79d794d7-m7h8z" event={"ID":"aaf43b8d-8ee8-4def-b8c3-8f5596513b2f","Type":"ContainerDied","Data":"6d67514fb4b6616bb01201c4f3c0d886e390f8ca5ba9089400cb30e06685b22a"} Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.206215 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c79d794d7-m7h8z" event={"ID":"aaf43b8d-8ee8-4def-b8c3-8f5596513b2f","Type":"ContainerStarted","Data":"740df49a4847c9f376743eedfa02ad8ab302e14747cfff534fc727af1d6f9d85"} Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.217561 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cbx9z\" (UniqueName: \"kubernetes.io/projected/f5e36e4f-4023-4a5a-9ae3-34cde7af452d-kube-api-access-cbx9z\") pod \"keystone-db-sync-xkfgv\" (UID: \"f5e36e4f-4023-4a5a-9ae3-34cde7af452d\") " pod="openstack/keystone-db-sync-xkfgv" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.217770 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/944249f8-45ae-4247-b092-54a0a081df4e-operator-scripts\") pod \"cinder-5ad9-account-create-update-p2wjh\" (UID: \"944249f8-45ae-4247-b092-54a0a081df4e\") " pod="openstack/cinder-5ad9-account-create-update-p2wjh" 
Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.217864 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5e36e4f-4023-4a5a-9ae3-34cde7af452d-combined-ca-bundle\") pod \"keystone-db-sync-xkfgv\" (UID: \"f5e36e4f-4023-4a5a-9ae3-34cde7af452d\") " pod="openstack/keystone-db-sync-xkfgv" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.217933 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xvkml\" (UniqueName: \"kubernetes.io/projected/a1085ae8-9862-42a2-9c52-561c82c2e966-kube-api-access-xvkml\") pod \"neutron-efc3-account-create-update-hxj9q\" (UID: \"a1085ae8-9862-42a2-9c52-561c82c2e966\") " pod="openstack/neutron-efc3-account-create-update-hxj9q" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.217979 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9pfs4\" (UniqueName: \"kubernetes.io/projected/944249f8-45ae-4247-b092-54a0a081df4e-kube-api-access-9pfs4\") pod \"cinder-5ad9-account-create-update-p2wjh\" (UID: \"944249f8-45ae-4247-b092-54a0a081df4e\") " pod="openstack/cinder-5ad9-account-create-update-p2wjh" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.218084 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5e36e4f-4023-4a5a-9ae3-34cde7af452d-config-data\") pod \"keystone-db-sync-xkfgv\" (UID: \"f5e36e4f-4023-4a5a-9ae3-34cde7af452d\") " pod="openstack/keystone-db-sync-xkfgv" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.218192 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z76nb\" (UniqueName: \"kubernetes.io/projected/759af777-0707-490e-87e1-6f15b83fbfa0-kube-api-access-z76nb\") pod \"neutron-db-create-9kxd9\" (UID: \"759af777-0707-490e-87e1-6f15b83fbfa0\") " pod="openstack/neutron-db-create-9kxd9" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.218218 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/759af777-0707-490e-87e1-6f15b83fbfa0-operator-scripts\") pod \"neutron-db-create-9kxd9\" (UID: \"759af777-0707-490e-87e1-6f15b83fbfa0\") " pod="openstack/neutron-db-create-9kxd9" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.218290 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a1085ae8-9862-42a2-9c52-561c82c2e966-operator-scripts\") pod \"neutron-efc3-account-create-update-hxj9q\" (UID: \"a1085ae8-9862-42a2-9c52-561c82c2e966\") " pod="openstack/neutron-efc3-account-create-update-hxj9q" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.218949 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/944249f8-45ae-4247-b092-54a0a081df4e-operator-scripts\") pod \"cinder-5ad9-account-create-update-p2wjh\" (UID: \"944249f8-45ae-4247-b092-54a0a081df4e\") " pod="openstack/cinder-5ad9-account-create-update-p2wjh" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.225833 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5e36e4f-4023-4a5a-9ae3-34cde7af452d-combined-ca-bundle\") pod 
\"keystone-db-sync-xkfgv\" (UID: \"f5e36e4f-4023-4a5a-9ae3-34cde7af452d\") " pod="openstack/keystone-db-sync-xkfgv" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.236014 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5e36e4f-4023-4a5a-9ae3-34cde7af452d-config-data\") pod \"keystone-db-sync-xkfgv\" (UID: \"f5e36e4f-4023-4a5a-9ae3-34cde7af452d\") " pod="openstack/keystone-db-sync-xkfgv" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.257462 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cbx9z\" (UniqueName: \"kubernetes.io/projected/f5e36e4f-4023-4a5a-9ae3-34cde7af452d-kube-api-access-cbx9z\") pod \"keystone-db-sync-xkfgv\" (UID: \"f5e36e4f-4023-4a5a-9ae3-34cde7af452d\") " pod="openstack/keystone-db-sync-xkfgv" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.257897 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9pfs4\" (UniqueName: \"kubernetes.io/projected/944249f8-45ae-4247-b092-54a0a081df4e-kube-api-access-9pfs4\") pod \"cinder-5ad9-account-create-update-p2wjh\" (UID: \"944249f8-45ae-4247-b092-54a0a081df4e\") " pod="openstack/cinder-5ad9-account-create-update-p2wjh" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.326365 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/759af777-0707-490e-87e1-6f15b83fbfa0-operator-scripts\") pod \"neutron-db-create-9kxd9\" (UID: \"759af777-0707-490e-87e1-6f15b83fbfa0\") " pod="openstack/neutron-db-create-9kxd9" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.326879 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a1085ae8-9862-42a2-9c52-561c82c2e966-operator-scripts\") pod \"neutron-efc3-account-create-update-hxj9q\" (UID: \"a1085ae8-9862-42a2-9c52-561c82c2e966\") " pod="openstack/neutron-efc3-account-create-update-hxj9q" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.327152 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xvkml\" (UniqueName: \"kubernetes.io/projected/a1085ae8-9862-42a2-9c52-561c82c2e966-kube-api-access-xvkml\") pod \"neutron-efc3-account-create-update-hxj9q\" (UID: \"a1085ae8-9862-42a2-9c52-561c82c2e966\") " pod="openstack/neutron-efc3-account-create-update-hxj9q" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.327299 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z76nb\" (UniqueName: \"kubernetes.io/projected/759af777-0707-490e-87e1-6f15b83fbfa0-kube-api-access-z76nb\") pod \"neutron-db-create-9kxd9\" (UID: \"759af777-0707-490e-87e1-6f15b83fbfa0\") " pod="openstack/neutron-db-create-9kxd9" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.327367 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/759af777-0707-490e-87e1-6f15b83fbfa0-operator-scripts\") pod \"neutron-db-create-9kxd9\" (UID: \"759af777-0707-490e-87e1-6f15b83fbfa0\") " pod="openstack/neutron-db-create-9kxd9" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.328772 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a1085ae8-9862-42a2-9c52-561c82c2e966-operator-scripts\") pod 
\"neutron-efc3-account-create-update-hxj9q\" (UID: \"a1085ae8-9862-42a2-9c52-561c82c2e966\") " pod="openstack/neutron-efc3-account-create-update-hxj9q" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.358346 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xvkml\" (UniqueName: \"kubernetes.io/projected/a1085ae8-9862-42a2-9c52-561c82c2e966-kube-api-access-xvkml\") pod \"neutron-efc3-account-create-update-hxj9q\" (UID: \"a1085ae8-9862-42a2-9c52-561c82c2e966\") " pod="openstack/neutron-efc3-account-create-update-hxj9q" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.361650 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z76nb\" (UniqueName: \"kubernetes.io/projected/759af777-0707-490e-87e1-6f15b83fbfa0-kube-api-access-z76nb\") pod \"neutron-db-create-9kxd9\" (UID: \"759af777-0707-490e-87e1-6f15b83fbfa0\") " pod="openstack/neutron-db-create-9kxd9" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.497866 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-5ad9-account-create-update-p2wjh" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.522434 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-xkfgv" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.572582 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-9kxd9" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.594292 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-efc3-account-create-update-hxj9q" Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.698800 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-32a5-account-create-update-vpbkq"] Dec 08 21:42:22 crc kubenswrapper[4791]: I1208 21:42:22.734917 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-d2jht"] Dec 08 21:42:22 crc kubenswrapper[4791]: W1208 21:42:22.771240 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb317f179_52ca_4d94_bd3a_c9cfd5096839.slice/crio-dcb60d0d7bae64cfafd298e72d07292baf6a759709d4bae7f732b7a819abd307 WatchSource:0}: Error finding container dcb60d0d7bae64cfafd298e72d07292baf6a759709d4bae7f732b7a819abd307: Status 404 returned error can't find the container with id dcb60d0d7bae64cfafd298e72d07292baf6a759709d4bae7f732b7a819abd307 Dec 08 21:42:23 crc kubenswrapper[4791]: I1208 21:42:23.026721 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-pmmmw"] Dec 08 21:42:23 crc kubenswrapper[4791]: I1208 21:42:23.035809 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-pmmmw" Dec 08 21:42:23 crc kubenswrapper[4791]: I1208 21:42:23.062615 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pmmmw"] Dec 08 21:42:23 crc kubenswrapper[4791]: I1208 21:42:23.150538 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9780822-bcb2-4576-a005-16807709c69b-catalog-content\") pod \"community-operators-pmmmw\" (UID: \"d9780822-bcb2-4576-a005-16807709c69b\") " pod="openshift-marketplace/community-operators-pmmmw" Dec 08 21:42:23 crc kubenswrapper[4791]: I1208 21:42:23.150958 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9780822-bcb2-4576-a005-16807709c69b-utilities\") pod \"community-operators-pmmmw\" (UID: \"d9780822-bcb2-4576-a005-16807709c69b\") " pod="openshift-marketplace/community-operators-pmmmw" Dec 08 21:42:23 crc kubenswrapper[4791]: I1208 21:42:23.151025 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-59th7\" (UniqueName: \"kubernetes.io/projected/d9780822-bcb2-4576-a005-16807709c69b-kube-api-access-59th7\") pod \"community-operators-pmmmw\" (UID: \"d9780822-bcb2-4576-a005-16807709c69b\") " pod="openshift-marketplace/community-operators-pmmmw" Dec 08 21:42:23 crc kubenswrapper[4791]: I1208 21:42:23.216803 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c79d794d7-m7h8z" event={"ID":"aaf43b8d-8ee8-4def-b8c3-8f5596513b2f","Type":"ContainerStarted","Data":"6ae75eb34657f585e1939faacaa48865a1916b8e299a99eefbff873707b6af10"} Dec 08 21:42:23 crc kubenswrapper[4791]: I1208 21:42:23.218076 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c79d794d7-m7h8z" Dec 08 21:42:23 crc kubenswrapper[4791]: I1208 21:42:23.222071 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-32a5-account-create-update-vpbkq" event={"ID":"b317f179-52ca-4d94-bd3a-c9cfd5096839","Type":"ContainerStarted","Data":"f5c5924f8877ffda7f15eca0220f57bb6107401463c671e23f93d215ee3416e6"} Dec 08 21:42:23 crc kubenswrapper[4791]: I1208 21:42:23.222124 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-32a5-account-create-update-vpbkq" event={"ID":"b317f179-52ca-4d94-bd3a-c9cfd5096839","Type":"ContainerStarted","Data":"dcb60d0d7bae64cfafd298e72d07292baf6a759709d4bae7f732b7a819abd307"} Dec 08 21:42:23 crc kubenswrapper[4791]: I1208 21:42:23.226490 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-d2jht" event={"ID":"83b4f5fa-551e-4849-baf6-7afb53700f1d","Type":"ContainerStarted","Data":"f90d6e30283c2e6431c5c3de37201c866c6b83e4db08c5a645bbadb6ef2ad988"} Dec 08 21:42:23 crc kubenswrapper[4791]: I1208 21:42:23.253255 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9780822-bcb2-4576-a005-16807709c69b-utilities\") pod \"community-operators-pmmmw\" (UID: \"d9780822-bcb2-4576-a005-16807709c69b\") " pod="openshift-marketplace/community-operators-pmmmw" Dec 08 21:42:23 crc kubenswrapper[4791]: I1208 21:42:23.253335 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-59th7\" (UniqueName: 
\"kubernetes.io/projected/d9780822-bcb2-4576-a005-16807709c69b-kube-api-access-59th7\") pod \"community-operators-pmmmw\" (UID: \"d9780822-bcb2-4576-a005-16807709c69b\") " pod="openshift-marketplace/community-operators-pmmmw" Dec 08 21:42:23 crc kubenswrapper[4791]: I1208 21:42:23.253394 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9780822-bcb2-4576-a005-16807709c69b-catalog-content\") pod \"community-operators-pmmmw\" (UID: \"d9780822-bcb2-4576-a005-16807709c69b\") " pod="openshift-marketplace/community-operators-pmmmw" Dec 08 21:42:23 crc kubenswrapper[4791]: I1208 21:42:23.254053 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9780822-bcb2-4576-a005-16807709c69b-catalog-content\") pod \"community-operators-pmmmw\" (UID: \"d9780822-bcb2-4576-a005-16807709c69b\") " pod="openshift-marketplace/community-operators-pmmmw" Dec 08 21:42:23 crc kubenswrapper[4791]: I1208 21:42:23.257082 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9780822-bcb2-4576-a005-16807709c69b-utilities\") pod \"community-operators-pmmmw\" (UID: \"d9780822-bcb2-4576-a005-16807709c69b\") " pod="openshift-marketplace/community-operators-pmmmw" Dec 08 21:42:23 crc kubenswrapper[4791]: I1208 21:42:23.273500 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c79d794d7-m7h8z" podStartSLOduration=3.273478551 podStartE2EDuration="3.273478551s" podCreationTimestamp="2025-12-08 21:42:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:42:23.248449776 +0000 UTC m=+1419.947208121" watchObservedRunningTime="2025-12-08 21:42:23.273478551 +0000 UTC m=+1419.972236896" Dec 08 21:42:23 crc kubenswrapper[4791]: I1208 21:42:23.293291 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-32a5-account-create-update-vpbkq" podStartSLOduration=2.293271988 podStartE2EDuration="2.293271988s" podCreationTimestamp="2025-12-08 21:42:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:42:23.263333792 +0000 UTC m=+1419.962092137" watchObservedRunningTime="2025-12-08 21:42:23.293271988 +0000 UTC m=+1419.992030333" Dec 08 21:42:23 crc kubenswrapper[4791]: I1208 21:42:23.309744 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-59th7\" (UniqueName: \"kubernetes.io/projected/d9780822-bcb2-4576-a005-16807709c69b-kube-api-access-59th7\") pod \"community-operators-pmmmw\" (UID: \"d9780822-bcb2-4576-a005-16807709c69b\") " pod="openshift-marketplace/community-operators-pmmmw" Dec 08 21:42:23 crc kubenswrapper[4791]: I1208 21:42:23.344888 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-5rt7m"] Dec 08 21:42:23 crc kubenswrapper[4791]: W1208 21:42:23.346454 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod944249f8_45ae_4247_b092_54a0a081df4e.slice/crio-166f337220ae0a5cdb51c61a78b414ee4eab1d7eaf0bcb51d2cfb4f0112dc08b WatchSource:0}: Error finding container 166f337220ae0a5cdb51c61a78b414ee4eab1d7eaf0bcb51d2cfb4f0112dc08b: Status 404 returned error can't 
find the container with id 166f337220ae0a5cdb51c61a78b414ee4eab1d7eaf0bcb51d2cfb4f0112dc08b Dec 08 21:42:23 crc kubenswrapper[4791]: I1208 21:42:23.360080 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-5ad9-account-create-update-p2wjh"] Dec 08 21:42:23 crc kubenswrapper[4791]: I1208 21:42:23.392826 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pmmmw" Dec 08 21:42:23 crc kubenswrapper[4791]: I1208 21:42:23.409091 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-create-rdkss"] Dec 08 21:42:23 crc kubenswrapper[4791]: W1208 21:42:23.420513 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode35cda62_b9bc_4055_b831_2f8beb709d69.slice/crio-9daf6c3b8b124e8d23cc0412058dc994a8adf32c35fe04b21bd94b4607cf1d4e WatchSource:0}: Error finding container 9daf6c3b8b124e8d23cc0412058dc994a8adf32c35fe04b21bd94b4607cf1d4e: Status 404 returned error can't find the container with id 9daf6c3b8b124e8d23cc0412058dc994a8adf32c35fe04b21bd94b4607cf1d4e Dec 08 21:42:23 crc kubenswrapper[4791]: W1208 21:42:23.425995 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod55f1c8c7_116e_4f7f_9b3e_b94c44b5a755.slice/crio-b9603631ea2c409fbac3cb213aba8570cb5c7428f7a3b40dd5e3a01fbc4f0dfd WatchSource:0}: Error finding container b9603631ea2c409fbac3cb213aba8570cb5c7428f7a3b40dd5e3a01fbc4f0dfd: Status 404 returned error can't find the container with id b9603631ea2c409fbac3cb213aba8570cb5c7428f7a3b40dd5e3a01fbc4f0dfd Dec 08 21:42:23 crc kubenswrapper[4791]: I1208 21:42:23.443523 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-0f49-account-create-update-bxt2q"] Dec 08 21:42:23 crc kubenswrapper[4791]: I1208 21:42:23.512946 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-xkfgv"] Dec 08 21:42:23 crc kubenswrapper[4791]: W1208 21:42:23.562235 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf5e36e4f_4023_4a5a_9ae3_34cde7af452d.slice/crio-4046cd7082748397b64cc760bf8ef654586420ec848ee17a13fe1dfa7280b53d WatchSource:0}: Error finding container 4046cd7082748397b64cc760bf8ef654586420ec848ee17a13fe1dfa7280b53d: Status 404 returned error can't find the container with id 4046cd7082748397b64cc760bf8ef654586420ec848ee17a13fe1dfa7280b53d Dec 08 21:42:23 crc kubenswrapper[4791]: I1208 21:42:23.766104 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-9kxd9"] Dec 08 21:42:23 crc kubenswrapper[4791]: I1208 21:42:23.766414 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-efc3-account-create-update-hxj9q"] Dec 08 21:42:24 crc kubenswrapper[4791]: I1208 21:42:24.246792 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-rdkss" event={"ID":"e35cda62-b9bc-4055-b831-2f8beb709d69","Type":"ContainerStarted","Data":"9daf6c3b8b124e8d23cc0412058dc994a8adf32c35fe04b21bd94b4607cf1d4e"} Dec 08 21:42:24 crc kubenswrapper[4791]: I1208 21:42:24.253038 4791 generic.go:334] "Generic (PLEG): container finished" podID="83b4f5fa-551e-4849-baf6-7afb53700f1d" containerID="2e262a6f07eeebbe312adce5e28ff3bc470a5b43f6a4283da0d885b98006b53b" exitCode=0 Dec 08 21:42:24 crc kubenswrapper[4791]: I1208 21:42:24.253296 4791 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-d2jht" event={"ID":"83b4f5fa-551e-4849-baf6-7afb53700f1d","Type":"ContainerDied","Data":"2e262a6f07eeebbe312adce5e28ff3bc470a5b43f6a4283da0d885b98006b53b"} Dec 08 21:42:24 crc kubenswrapper[4791]: I1208 21:42:24.255392 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-9kxd9" event={"ID":"759af777-0707-490e-87e1-6f15b83fbfa0","Type":"ContainerStarted","Data":"ed61162e66a0d89646666fea4b4d74e3fbb78fd86a35ace4d9bf0d3a3af2709b"} Dec 08 21:42:24 crc kubenswrapper[4791]: I1208 21:42:24.257817 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-0f49-account-create-update-bxt2q" event={"ID":"55f1c8c7-116e-4f7f-9b3e-b94c44b5a755","Type":"ContainerStarted","Data":"b9603631ea2c409fbac3cb213aba8570cb5c7428f7a3b40dd5e3a01fbc4f0dfd"} Dec 08 21:42:24 crc kubenswrapper[4791]: I1208 21:42:24.259154 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-xkfgv" event={"ID":"f5e36e4f-4023-4a5a-9ae3-34cde7af452d","Type":"ContainerStarted","Data":"4046cd7082748397b64cc760bf8ef654586420ec848ee17a13fe1dfa7280b53d"} Dec 08 21:42:24 crc kubenswrapper[4791]: I1208 21:42:24.260824 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pmmmw"] Dec 08 21:42:24 crc kubenswrapper[4791]: I1208 21:42:24.262220 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-efc3-account-create-update-hxj9q" event={"ID":"a1085ae8-9862-42a2-9c52-561c82c2e966","Type":"ContainerStarted","Data":"f831c5ab776be84ff2b93b108b96da0b9d0f4e5669d3a503f179fceb00e65643"} Dec 08 21:42:24 crc kubenswrapper[4791]: I1208 21:42:24.268470 4791 generic.go:334] "Generic (PLEG): container finished" podID="b317f179-52ca-4d94-bd3a-c9cfd5096839" containerID="f5c5924f8877ffda7f15eca0220f57bb6107401463c671e23f93d215ee3416e6" exitCode=0 Dec 08 21:42:24 crc kubenswrapper[4791]: I1208 21:42:24.268532 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-32a5-account-create-update-vpbkq" event={"ID":"b317f179-52ca-4d94-bd3a-c9cfd5096839","Type":"ContainerDied","Data":"f5c5924f8877ffda7f15eca0220f57bb6107401463c671e23f93d215ee3416e6"} Dec 08 21:42:24 crc kubenswrapper[4791]: I1208 21:42:24.271233 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-5rt7m" event={"ID":"ceb03ddf-9eca-4760-82da-ef871c8f2af7","Type":"ContainerStarted","Data":"6113d19cb941d8a3159780009ba1b41b0740472cbf8e1e7276afac941cd5e104"} Dec 08 21:42:24 crc kubenswrapper[4791]: I1208 21:42:24.271266 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-5rt7m" event={"ID":"ceb03ddf-9eca-4760-82da-ef871c8f2af7","Type":"ContainerStarted","Data":"0ec2129d1dc96093f156a65f38e4d7b084913a3c82251d4a65c181cca80ef23d"} Dec 08 21:42:24 crc kubenswrapper[4791]: I1208 21:42:24.274064 4791 generic.go:334] "Generic (PLEG): container finished" podID="944249f8-45ae-4247-b092-54a0a081df4e" containerID="ae5fc73a6cfbfb0c28199a16ecc2d5338e32e1b0a02ce8a4d11e115f5935b3b7" exitCode=0 Dec 08 21:42:24 crc kubenswrapper[4791]: I1208 21:42:24.274130 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-5ad9-account-create-update-p2wjh" event={"ID":"944249f8-45ae-4247-b092-54a0a081df4e","Type":"ContainerDied","Data":"ae5fc73a6cfbfb0c28199a16ecc2d5338e32e1b0a02ce8a4d11e115f5935b3b7"} Dec 08 21:42:24 crc kubenswrapper[4791]: I1208 21:42:24.274168 4791 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack/cinder-5ad9-account-create-update-p2wjh" event={"ID":"944249f8-45ae-4247-b092-54a0a081df4e","Type":"ContainerStarted","Data":"166f337220ae0a5cdb51c61a78b414ee4eab1d7eaf0bcb51d2cfb4f0112dc08b"} Dec 08 21:42:24 crc kubenswrapper[4791]: I1208 21:42:24.325533 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-create-5rt7m" podStartSLOduration=3.325507886 podStartE2EDuration="3.325507886s" podCreationTimestamp="2025-12-08 21:42:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:42:24.309860991 +0000 UTC m=+1421.008619336" watchObservedRunningTime="2025-12-08 21:42:24.325507886 +0000 UTC m=+1421.024266231" Dec 08 21:42:25 crc kubenswrapper[4791]: I1208 21:42:25.285939 4791 generic.go:334] "Generic (PLEG): container finished" podID="a1085ae8-9862-42a2-9c52-561c82c2e966" containerID="11491c3efe1deaddedc84ca292aa8e162a34a9c2d29b24af5512a91cc2638753" exitCode=0 Dec 08 21:42:25 crc kubenswrapper[4791]: I1208 21:42:25.286039 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-efc3-account-create-update-hxj9q" event={"ID":"a1085ae8-9862-42a2-9c52-561c82c2e966","Type":"ContainerDied","Data":"11491c3efe1deaddedc84ca292aa8e162a34a9c2d29b24af5512a91cc2638753"} Dec 08 21:42:25 crc kubenswrapper[4791]: I1208 21:42:25.289401 4791 generic.go:334] "Generic (PLEG): container finished" podID="ceb03ddf-9eca-4760-82da-ef871c8f2af7" containerID="6113d19cb941d8a3159780009ba1b41b0740472cbf8e1e7276afac941cd5e104" exitCode=0 Dec 08 21:42:25 crc kubenswrapper[4791]: I1208 21:42:25.289458 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-5rt7m" event={"ID":"ceb03ddf-9eca-4760-82da-ef871c8f2af7","Type":"ContainerDied","Data":"6113d19cb941d8a3159780009ba1b41b0740472cbf8e1e7276afac941cd5e104"} Dec 08 21:42:25 crc kubenswrapper[4791]: I1208 21:42:25.290796 4791 generic.go:334] "Generic (PLEG): container finished" podID="759af777-0707-490e-87e1-6f15b83fbfa0" containerID="1c4479a48997ee5b056ce27a91b76ae350f75cca7e51b077b140e8c4bc508256" exitCode=0 Dec 08 21:42:25 crc kubenswrapper[4791]: I1208 21:42:25.290902 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-9kxd9" event={"ID":"759af777-0707-490e-87e1-6f15b83fbfa0","Type":"ContainerDied","Data":"1c4479a48997ee5b056ce27a91b76ae350f75cca7e51b077b140e8c4bc508256"} Dec 08 21:42:25 crc kubenswrapper[4791]: I1208 21:42:25.292748 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-rrtnb" event={"ID":"e74124c9-f94a-4168-a9b1-dafbcb9e0f70","Type":"ContainerStarted","Data":"0e8bd64f021b879cf2a2348f728c3648af903cf50353439466651a66f3406f8a"} Dec 08 21:42:25 crc kubenswrapper[4791]: I1208 21:42:25.295478 4791 generic.go:334] "Generic (PLEG): container finished" podID="d9780822-bcb2-4576-a005-16807709c69b" containerID="0e3777f9fbc6f74d7b48caed52a0a0dc3d1640ecf14d3ed5537ca8d19afd55be" exitCode=0 Dec 08 21:42:25 crc kubenswrapper[4791]: I1208 21:42:25.295554 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pmmmw" event={"ID":"d9780822-bcb2-4576-a005-16807709c69b","Type":"ContainerDied","Data":"0e3777f9fbc6f74d7b48caed52a0a0dc3d1640ecf14d3ed5537ca8d19afd55be"} Dec 08 21:42:25 crc kubenswrapper[4791]: I1208 21:42:25.295584 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pmmmw" 
event={"ID":"d9780822-bcb2-4576-a005-16807709c69b","Type":"ContainerStarted","Data":"3650278cfe9686d4be1cece26b0f127edeb822892a26466360519eb4d2e5cc0b"} Dec 08 21:42:25 crc kubenswrapper[4791]: I1208 21:42:25.298099 4791 generic.go:334] "Generic (PLEG): container finished" podID="55f1c8c7-116e-4f7f-9b3e-b94c44b5a755" containerID="08feded04ccfa47f6af9d1157fda3fe4526d6b561c00654fcb81f240a08b959e" exitCode=0 Dec 08 21:42:25 crc kubenswrapper[4791]: I1208 21:42:25.298175 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-0f49-account-create-update-bxt2q" event={"ID":"55f1c8c7-116e-4f7f-9b3e-b94c44b5a755","Type":"ContainerDied","Data":"08feded04ccfa47f6af9d1157fda3fe4526d6b561c00654fcb81f240a08b959e"} Dec 08 21:42:25 crc kubenswrapper[4791]: I1208 21:42:25.302274 4791 generic.go:334] "Generic (PLEG): container finished" podID="e35cda62-b9bc-4055-b831-2f8beb709d69" containerID="edc00757880c8ddcd5ac505ae94b8ebe2c5f7cef390d22e63761fc535b22b679" exitCode=0 Dec 08 21:42:25 crc kubenswrapper[4791]: I1208 21:42:25.302550 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-rdkss" event={"ID":"e35cda62-b9bc-4055-b831-2f8beb709d69","Type":"ContainerDied","Data":"edc00757880c8ddcd5ac505ae94b8ebe2c5f7cef390d22e63761fc535b22b679"} Dec 08 21:42:25 crc kubenswrapper[4791]: I1208 21:42:25.332160 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-rrtnb" podStartSLOduration=2.5845797790000002 podStartE2EDuration="35.332132613s" podCreationTimestamp="2025-12-08 21:41:50 +0000 UTC" firstStartedPulling="2025-12-08 21:41:51.376278073 +0000 UTC m=+1388.075036418" lastFinishedPulling="2025-12-08 21:42:24.123830907 +0000 UTC m=+1420.822589252" observedRunningTime="2025-12-08 21:42:25.32224481 +0000 UTC m=+1422.021003155" watchObservedRunningTime="2025-12-08 21:42:25.332132613 +0000 UTC m=+1422.030890958" Dec 08 21:42:25 crc kubenswrapper[4791]: I1208 21:42:25.908317 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-d2jht" Dec 08 21:42:25 crc kubenswrapper[4791]: I1208 21:42:25.967961 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6fptk\" (UniqueName: \"kubernetes.io/projected/83b4f5fa-551e-4849-baf6-7afb53700f1d-kube-api-access-6fptk\") pod \"83b4f5fa-551e-4849-baf6-7afb53700f1d\" (UID: \"83b4f5fa-551e-4849-baf6-7afb53700f1d\") " Dec 08 21:42:25 crc kubenswrapper[4791]: I1208 21:42:25.968150 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/83b4f5fa-551e-4849-baf6-7afb53700f1d-operator-scripts\") pod \"83b4f5fa-551e-4849-baf6-7afb53700f1d\" (UID: \"83b4f5fa-551e-4849-baf6-7afb53700f1d\") " Dec 08 21:42:25 crc kubenswrapper[4791]: I1208 21:42:25.969023 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/83b4f5fa-551e-4849-baf6-7afb53700f1d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "83b4f5fa-551e-4849-baf6-7afb53700f1d" (UID: "83b4f5fa-551e-4849-baf6-7afb53700f1d"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:42:25 crc kubenswrapper[4791]: I1208 21:42:25.979926 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83b4f5fa-551e-4849-baf6-7afb53700f1d-kube-api-access-6fptk" (OuterVolumeSpecName: "kube-api-access-6fptk") pod "83b4f5fa-551e-4849-baf6-7afb53700f1d" (UID: "83b4f5fa-551e-4849-baf6-7afb53700f1d"). InnerVolumeSpecName "kube-api-access-6fptk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:42:26 crc kubenswrapper[4791]: I1208 21:42:26.073234 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6fptk\" (UniqueName: \"kubernetes.io/projected/83b4f5fa-551e-4849-baf6-7afb53700f1d-kube-api-access-6fptk\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:26 crc kubenswrapper[4791]: I1208 21:42:26.073268 4791 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/83b4f5fa-551e-4849-baf6-7afb53700f1d-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:26 crc kubenswrapper[4791]: I1208 21:42:26.107393 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-32a5-account-create-update-vpbkq" Dec 08 21:42:26 crc kubenswrapper[4791]: I1208 21:42:26.124431 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-5ad9-account-create-update-p2wjh" Dec 08 21:42:26 crc kubenswrapper[4791]: I1208 21:42:26.175046 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b317f179-52ca-4d94-bd3a-c9cfd5096839-operator-scripts\") pod \"b317f179-52ca-4d94-bd3a-c9cfd5096839\" (UID: \"b317f179-52ca-4d94-bd3a-c9cfd5096839\") " Dec 08 21:42:26 crc kubenswrapper[4791]: I1208 21:42:26.175248 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9pfs4\" (UniqueName: \"kubernetes.io/projected/944249f8-45ae-4247-b092-54a0a081df4e-kube-api-access-9pfs4\") pod \"944249f8-45ae-4247-b092-54a0a081df4e\" (UID: \"944249f8-45ae-4247-b092-54a0a081df4e\") " Dec 08 21:42:26 crc kubenswrapper[4791]: I1208 21:42:26.175348 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/944249f8-45ae-4247-b092-54a0a081df4e-operator-scripts\") pod \"944249f8-45ae-4247-b092-54a0a081df4e\" (UID: \"944249f8-45ae-4247-b092-54a0a081df4e\") " Dec 08 21:42:26 crc kubenswrapper[4791]: I1208 21:42:26.175403 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-68k6t\" (UniqueName: \"kubernetes.io/projected/b317f179-52ca-4d94-bd3a-c9cfd5096839-kube-api-access-68k6t\") pod \"b317f179-52ca-4d94-bd3a-c9cfd5096839\" (UID: \"b317f179-52ca-4d94-bd3a-c9cfd5096839\") " Dec 08 21:42:26 crc kubenswrapper[4791]: I1208 21:42:26.175718 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b317f179-52ca-4d94-bd3a-c9cfd5096839-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b317f179-52ca-4d94-bd3a-c9cfd5096839" (UID: "b317f179-52ca-4d94-bd3a-c9cfd5096839"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:42:26 crc kubenswrapper[4791]: I1208 21:42:26.175952 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/944249f8-45ae-4247-b092-54a0a081df4e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "944249f8-45ae-4247-b092-54a0a081df4e" (UID: "944249f8-45ae-4247-b092-54a0a081df4e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:42:26 crc kubenswrapper[4791]: I1208 21:42:26.176043 4791 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b317f179-52ca-4d94-bd3a-c9cfd5096839-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:26 crc kubenswrapper[4791]: I1208 21:42:26.182943 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/944249f8-45ae-4247-b092-54a0a081df4e-kube-api-access-9pfs4" (OuterVolumeSpecName: "kube-api-access-9pfs4") pod "944249f8-45ae-4247-b092-54a0a081df4e" (UID: "944249f8-45ae-4247-b092-54a0a081df4e"). InnerVolumeSpecName "kube-api-access-9pfs4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:42:26 crc kubenswrapper[4791]: I1208 21:42:26.195321 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b317f179-52ca-4d94-bd3a-c9cfd5096839-kube-api-access-68k6t" (OuterVolumeSpecName: "kube-api-access-68k6t") pod "b317f179-52ca-4d94-bd3a-c9cfd5096839" (UID: "b317f179-52ca-4d94-bd3a-c9cfd5096839"). InnerVolumeSpecName "kube-api-access-68k6t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:42:26 crc kubenswrapper[4791]: I1208 21:42:26.278112 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9pfs4\" (UniqueName: \"kubernetes.io/projected/944249f8-45ae-4247-b092-54a0a081df4e-kube-api-access-9pfs4\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:26 crc kubenswrapper[4791]: I1208 21:42:26.278149 4791 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/944249f8-45ae-4247-b092-54a0a081df4e-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:26 crc kubenswrapper[4791]: I1208 21:42:26.278161 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-68k6t\" (UniqueName: \"kubernetes.io/projected/b317f179-52ca-4d94-bd3a-c9cfd5096839-kube-api-access-68k6t\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:26 crc kubenswrapper[4791]: I1208 21:42:26.318628 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-5ad9-account-create-update-p2wjh" event={"ID":"944249f8-45ae-4247-b092-54a0a081df4e","Type":"ContainerDied","Data":"166f337220ae0a5cdb51c61a78b414ee4eab1d7eaf0bcb51d2cfb4f0112dc08b"} Dec 08 21:42:26 crc kubenswrapper[4791]: I1208 21:42:26.318672 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="166f337220ae0a5cdb51c61a78b414ee4eab1d7eaf0bcb51d2cfb4f0112dc08b" Dec 08 21:42:26 crc kubenswrapper[4791]: I1208 21:42:26.318641 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-5ad9-account-create-update-p2wjh" Dec 08 21:42:26 crc kubenswrapper[4791]: I1208 21:42:26.322854 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-32a5-account-create-update-vpbkq" event={"ID":"b317f179-52ca-4d94-bd3a-c9cfd5096839","Type":"ContainerDied","Data":"dcb60d0d7bae64cfafd298e72d07292baf6a759709d4bae7f732b7a819abd307"} Dec 08 21:42:26 crc kubenswrapper[4791]: I1208 21:42:26.322898 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dcb60d0d7bae64cfafd298e72d07292baf6a759709d4bae7f732b7a819abd307" Dec 08 21:42:26 crc kubenswrapper[4791]: I1208 21:42:26.322957 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-32a5-account-create-update-vpbkq" Dec 08 21:42:26 crc kubenswrapper[4791]: I1208 21:42:26.332378 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-d2jht" event={"ID":"83b4f5fa-551e-4849-baf6-7afb53700f1d","Type":"ContainerDied","Data":"f90d6e30283c2e6431c5c3de37201c866c6b83e4db08c5a645bbadb6ef2ad988"} Dec 08 21:42:26 crc kubenswrapper[4791]: I1208 21:42:26.332418 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f90d6e30283c2e6431c5c3de37201c866c6b83e4db08c5a645bbadb6ef2ad988" Dec 08 21:42:26 crc kubenswrapper[4791]: I1208 21:42:26.334769 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-d2jht" Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.103020 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-9kxd9" Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.147339 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-efc3-account-create-update-hxj9q" Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.156300 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-rdkss" Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.186383 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-5rt7m" Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.212406 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-0f49-account-create-update-bxt2q" Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.246620 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/759af777-0707-490e-87e1-6f15b83fbfa0-operator-scripts\") pod \"759af777-0707-490e-87e1-6f15b83fbfa0\" (UID: \"759af777-0707-490e-87e1-6f15b83fbfa0\") " Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.246836 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z76nb\" (UniqueName: \"kubernetes.io/projected/759af777-0707-490e-87e1-6f15b83fbfa0-kube-api-access-z76nb\") pod \"759af777-0707-490e-87e1-6f15b83fbfa0\" (UID: \"759af777-0707-490e-87e1-6f15b83fbfa0\") " Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.247014 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-srwk7\" (UniqueName: \"kubernetes.io/projected/ceb03ddf-9eca-4760-82da-ef871c8f2af7-kube-api-access-srwk7\") pod \"ceb03ddf-9eca-4760-82da-ef871c8f2af7\" (UID: \"ceb03ddf-9eca-4760-82da-ef871c8f2af7\") " Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.247125 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e35cda62-b9bc-4055-b831-2f8beb709d69-operator-scripts\") pod \"e35cda62-b9bc-4055-b831-2f8beb709d69\" (UID: \"e35cda62-b9bc-4055-b831-2f8beb709d69\") " Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.247285 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fj28l\" (UniqueName: \"kubernetes.io/projected/e35cda62-b9bc-4055-b831-2f8beb709d69-kube-api-access-fj28l\") pod \"e35cda62-b9bc-4055-b831-2f8beb709d69\" (UID: \"e35cda62-b9bc-4055-b831-2f8beb709d69\") " Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.247431 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a1085ae8-9862-42a2-9c52-561c82c2e966-operator-scripts\") pod \"a1085ae8-9862-42a2-9c52-561c82c2e966\" (UID: \"a1085ae8-9862-42a2-9c52-561c82c2e966\") " Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.247556 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ceb03ddf-9eca-4760-82da-ef871c8f2af7-operator-scripts\") pod \"ceb03ddf-9eca-4760-82da-ef871c8f2af7\" (UID: \"ceb03ddf-9eca-4760-82da-ef871c8f2af7\") " Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.247729 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xvkml\" (UniqueName: \"kubernetes.io/projected/a1085ae8-9862-42a2-9c52-561c82c2e966-kube-api-access-xvkml\") pod \"a1085ae8-9862-42a2-9c52-561c82c2e966\" (UID: \"a1085ae8-9862-42a2-9c52-561c82c2e966\") " Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.249219 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/759af777-0707-490e-87e1-6f15b83fbfa0-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "759af777-0707-490e-87e1-6f15b83fbfa0" (UID: "759af777-0707-490e-87e1-6f15b83fbfa0"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.250595 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ceb03ddf-9eca-4760-82da-ef871c8f2af7-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ceb03ddf-9eca-4760-82da-ef871c8f2af7" (UID: "ceb03ddf-9eca-4760-82da-ef871c8f2af7"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.251090 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a1085ae8-9862-42a2-9c52-561c82c2e966-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a1085ae8-9862-42a2-9c52-561c82c2e966" (UID: "a1085ae8-9862-42a2-9c52-561c82c2e966"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.251112 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e35cda62-b9bc-4055-b831-2f8beb709d69-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e35cda62-b9bc-4055-b831-2f8beb709d69" (UID: "e35cda62-b9bc-4055-b831-2f8beb709d69"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.256958 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1085ae8-9862-42a2-9c52-561c82c2e966-kube-api-access-xvkml" (OuterVolumeSpecName: "kube-api-access-xvkml") pod "a1085ae8-9862-42a2-9c52-561c82c2e966" (UID: "a1085ae8-9862-42a2-9c52-561c82c2e966"). InnerVolumeSpecName "kube-api-access-xvkml". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.265073 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/759af777-0707-490e-87e1-6f15b83fbfa0-kube-api-access-z76nb" (OuterVolumeSpecName: "kube-api-access-z76nb") pod "759af777-0707-490e-87e1-6f15b83fbfa0" (UID: "759af777-0707-490e-87e1-6f15b83fbfa0"). InnerVolumeSpecName "kube-api-access-z76nb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.265233 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e35cda62-b9bc-4055-b831-2f8beb709d69-kube-api-access-fj28l" (OuterVolumeSpecName: "kube-api-access-fj28l") pod "e35cda62-b9bc-4055-b831-2f8beb709d69" (UID: "e35cda62-b9bc-4055-b831-2f8beb709d69"). InnerVolumeSpecName "kube-api-access-fj28l". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.281384 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ceb03ddf-9eca-4760-82da-ef871c8f2af7-kube-api-access-srwk7" (OuterVolumeSpecName: "kube-api-access-srwk7") pod "ceb03ddf-9eca-4760-82da-ef871c8f2af7" (UID: "ceb03ddf-9eca-4760-82da-ef871c8f2af7"). InnerVolumeSpecName "kube-api-access-srwk7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.350357 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/55f1c8c7-116e-4f7f-9b3e-b94c44b5a755-operator-scripts\") pod \"55f1c8c7-116e-4f7f-9b3e-b94c44b5a755\" (UID: \"55f1c8c7-116e-4f7f-9b3e-b94c44b5a755\") " Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.350600 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9rgbm\" (UniqueName: \"kubernetes.io/projected/55f1c8c7-116e-4f7f-9b3e-b94c44b5a755-kube-api-access-9rgbm\") pod \"55f1c8c7-116e-4f7f-9b3e-b94c44b5a755\" (UID: \"55f1c8c7-116e-4f7f-9b3e-b94c44b5a755\") " Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.351039 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/55f1c8c7-116e-4f7f-9b3e-b94c44b5a755-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "55f1c8c7-116e-4f7f-9b3e-b94c44b5a755" (UID: "55f1c8c7-116e-4f7f-9b3e-b94c44b5a755"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.351673 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fj28l\" (UniqueName: \"kubernetes.io/projected/e35cda62-b9bc-4055-b831-2f8beb709d69-kube-api-access-fj28l\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.351768 4791 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/55f1c8c7-116e-4f7f-9b3e-b94c44b5a755-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.351792 4791 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a1085ae8-9862-42a2-9c52-561c82c2e966-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.351805 4791 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ceb03ddf-9eca-4760-82da-ef871c8f2af7-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.351817 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xvkml\" (UniqueName: \"kubernetes.io/projected/a1085ae8-9862-42a2-9c52-561c82c2e966-kube-api-access-xvkml\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.351829 4791 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/759af777-0707-490e-87e1-6f15b83fbfa0-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.351843 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z76nb\" (UniqueName: \"kubernetes.io/projected/759af777-0707-490e-87e1-6f15b83fbfa0-kube-api-access-z76nb\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.351856 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-srwk7\" (UniqueName: \"kubernetes.io/projected/ceb03ddf-9eca-4760-82da-ef871c8f2af7-kube-api-access-srwk7\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.351871 4791 reconciler_common.go:293] "Volume 
detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e35cda62-b9bc-4055-b831-2f8beb709d69-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.354134 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55f1c8c7-116e-4f7f-9b3e-b94c44b5a755-kube-api-access-9rgbm" (OuterVolumeSpecName: "kube-api-access-9rgbm") pod "55f1c8c7-116e-4f7f-9b3e-b94c44b5a755" (UID: "55f1c8c7-116e-4f7f-9b3e-b94c44b5a755"). InnerVolumeSpecName "kube-api-access-9rgbm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.366249 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-efc3-account-create-update-hxj9q" Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.366236 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-efc3-account-create-update-hxj9q" event={"ID":"a1085ae8-9862-42a2-9c52-561c82c2e966","Type":"ContainerDied","Data":"f831c5ab776be84ff2b93b108b96da0b9d0f4e5669d3a503f179fceb00e65643"} Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.366364 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f831c5ab776be84ff2b93b108b96da0b9d0f4e5669d3a503f179fceb00e65643" Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.372294 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-5rt7m" event={"ID":"ceb03ddf-9eca-4760-82da-ef871c8f2af7","Type":"ContainerDied","Data":"0ec2129d1dc96093f156a65f38e4d7b084913a3c82251d4a65c181cca80ef23d"} Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.372353 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0ec2129d1dc96093f156a65f38e4d7b084913a3c82251d4a65c181cca80ef23d" Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.372373 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-5rt7m" Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.375143 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-9kxd9" event={"ID":"759af777-0707-490e-87e1-6f15b83fbfa0","Type":"ContainerDied","Data":"ed61162e66a0d89646666fea4b4d74e3fbb78fd86a35ace4d9bf0d3a3af2709b"} Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.375200 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ed61162e66a0d89646666fea4b4d74e3fbb78fd86a35ace4d9bf0d3a3af2709b" Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.375232 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-9kxd9" Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.377345 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pmmmw" event={"ID":"d9780822-bcb2-4576-a005-16807709c69b","Type":"ContainerStarted","Data":"e498cf25bdf3b538c6937e955c5ea536267edfdc1b66f60e7ae12a257b6dabb8"} Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.383218 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-0f49-account-create-update-bxt2q" event={"ID":"55f1c8c7-116e-4f7f-9b3e-b94c44b5a755","Type":"ContainerDied","Data":"b9603631ea2c409fbac3cb213aba8570cb5c7428f7a3b40dd5e3a01fbc4f0dfd"} Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.383255 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b9603631ea2c409fbac3cb213aba8570cb5c7428f7a3b40dd5e3a01fbc4f0dfd" Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.383349 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-0f49-account-create-update-bxt2q" Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.390821 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-xkfgv" event={"ID":"f5e36e4f-4023-4a5a-9ae3-34cde7af452d","Type":"ContainerStarted","Data":"f152d0c1aae53adec81b09eb831ae39d9dfc33aa763d64eb2b80a69b9cdddb0f"} Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.394042 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-rdkss" event={"ID":"e35cda62-b9bc-4055-b831-2f8beb709d69","Type":"ContainerDied","Data":"9daf6c3b8b124e8d23cc0412058dc994a8adf32c35fe04b21bd94b4607cf1d4e"} Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.394072 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9daf6c3b8b124e8d23cc0412058dc994a8adf32c35fe04b21bd94b4607cf1d4e" Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.394107 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-create-rdkss" Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.431478 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-xkfgv" podStartSLOduration=3.133995827 podStartE2EDuration="8.431455445s" podCreationTimestamp="2025-12-08 21:42:21 +0000 UTC" firstStartedPulling="2025-12-08 21:42:23.583192136 +0000 UTC m=+1420.281950481" lastFinishedPulling="2025-12-08 21:42:28.880651754 +0000 UTC m=+1425.579410099" observedRunningTime="2025-12-08 21:42:29.422961087 +0000 UTC m=+1426.121719432" watchObservedRunningTime="2025-12-08 21:42:29.431455445 +0000 UTC m=+1426.130213790" Dec 08 21:42:29 crc kubenswrapper[4791]: I1208 21:42:29.456772 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9rgbm\" (UniqueName: \"kubernetes.io/projected/55f1c8c7-116e-4f7f-9b3e-b94c44b5a755-kube-api-access-9rgbm\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:30 crc kubenswrapper[4791]: I1208 21:42:30.412087 4791 generic.go:334] "Generic (PLEG): container finished" podID="d9780822-bcb2-4576-a005-16807709c69b" containerID="e498cf25bdf3b538c6937e955c5ea536267edfdc1b66f60e7ae12a257b6dabb8" exitCode=0 Dec 08 21:42:30 crc kubenswrapper[4791]: I1208 21:42:30.413827 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pmmmw" event={"ID":"d9780822-bcb2-4576-a005-16807709c69b","Type":"ContainerDied","Data":"e498cf25bdf3b538c6937e955c5ea536267edfdc1b66f60e7ae12a257b6dabb8"} Dec 08 21:42:30 crc kubenswrapper[4791]: I1208 21:42:30.790915 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5c79d794d7-m7h8z" Dec 08 21:42:30 crc kubenswrapper[4791]: I1208 21:42:30.861453 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-7cwhm"] Dec 08 21:42:30 crc kubenswrapper[4791]: I1208 21:42:30.873220 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-b8fbc5445-7cwhm" podUID="4ad73e16-b73d-48d2-9968-934d17c0dea1" containerName="dnsmasq-dns" containerID="cri-o://9235cecb8e67c24fa098fba18ee7fa46c517692f9caabd3e89d5ed8874928a35" gracePeriod=10 Dec 08 21:42:31 crc kubenswrapper[4791]: I1208 21:42:31.425833 4791 generic.go:334] "Generic (PLEG): container finished" podID="4ad73e16-b73d-48d2-9968-934d17c0dea1" containerID="9235cecb8e67c24fa098fba18ee7fa46c517692f9caabd3e89d5ed8874928a35" exitCode=0 Dec 08 21:42:31 crc kubenswrapper[4791]: I1208 21:42:31.425913 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-7cwhm" event={"ID":"4ad73e16-b73d-48d2-9968-934d17c0dea1","Type":"ContainerDied","Data":"9235cecb8e67c24fa098fba18ee7fa46c517692f9caabd3e89d5ed8874928a35"} Dec 08 21:42:31 crc kubenswrapper[4791]: I1208 21:42:31.428944 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pmmmw" event={"ID":"d9780822-bcb2-4576-a005-16807709c69b","Type":"ContainerStarted","Data":"f555e697b7b0dd29924ddffde7d840c1307d111f66d2ae27b1b1cbef89c75870"} Dec 08 21:42:31 crc kubenswrapper[4791]: I1208 21:42:31.969693 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-7cwhm" Dec 08 21:42:31 crc kubenswrapper[4791]: I1208 21:42:31.994946 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-pmmmw" podStartSLOduration=4.265006457 podStartE2EDuration="9.994926628s" podCreationTimestamp="2025-12-08 21:42:22 +0000 UTC" firstStartedPulling="2025-12-08 21:42:25.297184494 +0000 UTC m=+1421.995942839" lastFinishedPulling="2025-12-08 21:42:31.027104665 +0000 UTC m=+1427.725863010" observedRunningTime="2025-12-08 21:42:31.446740662 +0000 UTC m=+1428.145499017" watchObservedRunningTime="2025-12-08 21:42:31.994926628 +0000 UTC m=+1428.693684983" Dec 08 21:42:32 crc kubenswrapper[4791]: I1208 21:42:32.016145 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4ad73e16-b73d-48d2-9968-934d17c0dea1-ovsdbserver-sb\") pod \"4ad73e16-b73d-48d2-9968-934d17c0dea1\" (UID: \"4ad73e16-b73d-48d2-9968-934d17c0dea1\") " Dec 08 21:42:32 crc kubenswrapper[4791]: I1208 21:42:32.016241 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4ad73e16-b73d-48d2-9968-934d17c0dea1-config\") pod \"4ad73e16-b73d-48d2-9968-934d17c0dea1\" (UID: \"4ad73e16-b73d-48d2-9968-934d17c0dea1\") " Dec 08 21:42:32 crc kubenswrapper[4791]: I1208 21:42:32.016315 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4ad73e16-b73d-48d2-9968-934d17c0dea1-ovsdbserver-nb\") pod \"4ad73e16-b73d-48d2-9968-934d17c0dea1\" (UID: \"4ad73e16-b73d-48d2-9968-934d17c0dea1\") " Dec 08 21:42:32 crc kubenswrapper[4791]: I1208 21:42:32.016385 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wm9sx\" (UniqueName: \"kubernetes.io/projected/4ad73e16-b73d-48d2-9968-934d17c0dea1-kube-api-access-wm9sx\") pod \"4ad73e16-b73d-48d2-9968-934d17c0dea1\" (UID: \"4ad73e16-b73d-48d2-9968-934d17c0dea1\") " Dec 08 21:42:32 crc kubenswrapper[4791]: I1208 21:42:32.016490 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4ad73e16-b73d-48d2-9968-934d17c0dea1-dns-svc\") pod \"4ad73e16-b73d-48d2-9968-934d17c0dea1\" (UID: \"4ad73e16-b73d-48d2-9968-934d17c0dea1\") " Dec 08 21:42:32 crc kubenswrapper[4791]: I1208 21:42:32.029940 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ad73e16-b73d-48d2-9968-934d17c0dea1-kube-api-access-wm9sx" (OuterVolumeSpecName: "kube-api-access-wm9sx") pod "4ad73e16-b73d-48d2-9968-934d17c0dea1" (UID: "4ad73e16-b73d-48d2-9968-934d17c0dea1"). InnerVolumeSpecName "kube-api-access-wm9sx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:42:32 crc kubenswrapper[4791]: I1208 21:42:32.078406 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4ad73e16-b73d-48d2-9968-934d17c0dea1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "4ad73e16-b73d-48d2-9968-934d17c0dea1" (UID: "4ad73e16-b73d-48d2-9968-934d17c0dea1"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:42:32 crc kubenswrapper[4791]: I1208 21:42:32.084996 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4ad73e16-b73d-48d2-9968-934d17c0dea1-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "4ad73e16-b73d-48d2-9968-934d17c0dea1" (UID: "4ad73e16-b73d-48d2-9968-934d17c0dea1"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:42:32 crc kubenswrapper[4791]: I1208 21:42:32.098408 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4ad73e16-b73d-48d2-9968-934d17c0dea1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "4ad73e16-b73d-48d2-9968-934d17c0dea1" (UID: "4ad73e16-b73d-48d2-9968-934d17c0dea1"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:42:32 crc kubenswrapper[4791]: I1208 21:42:32.117059 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4ad73e16-b73d-48d2-9968-934d17c0dea1-config" (OuterVolumeSpecName: "config") pod "4ad73e16-b73d-48d2-9968-934d17c0dea1" (UID: "4ad73e16-b73d-48d2-9968-934d17c0dea1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:42:32 crc kubenswrapper[4791]: I1208 21:42:32.119132 4791 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4ad73e16-b73d-48d2-9968-934d17c0dea1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:32 crc kubenswrapper[4791]: I1208 21:42:32.119158 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wm9sx\" (UniqueName: \"kubernetes.io/projected/4ad73e16-b73d-48d2-9968-934d17c0dea1-kube-api-access-wm9sx\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:32 crc kubenswrapper[4791]: I1208 21:42:32.119173 4791 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4ad73e16-b73d-48d2-9968-934d17c0dea1-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:32 crc kubenswrapper[4791]: I1208 21:42:32.119182 4791 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4ad73e16-b73d-48d2-9968-934d17c0dea1-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:32 crc kubenswrapper[4791]: I1208 21:42:32.119190 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4ad73e16-b73d-48d2-9968-934d17c0dea1-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:32 crc kubenswrapper[4791]: I1208 21:42:32.439738 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-7cwhm" event={"ID":"4ad73e16-b73d-48d2-9968-934d17c0dea1","Type":"ContainerDied","Data":"779888df60463190360972dcb878af1c8adab92b8d091ca4ea59aad097b7ce32"} Dec 08 21:42:32 crc kubenswrapper[4791]: I1208 21:42:32.439810 4791 scope.go:117] "RemoveContainer" containerID="9235cecb8e67c24fa098fba18ee7fa46c517692f9caabd3e89d5ed8874928a35" Dec 08 21:42:32 crc kubenswrapper[4791]: I1208 21:42:32.439926 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-7cwhm" Dec 08 21:42:32 crc kubenswrapper[4791]: I1208 21:42:32.464680 4791 scope.go:117] "RemoveContainer" containerID="8bf23f9d0e1e05ac540011b43d16d434ad1c2856b16805372986ab78033b0d6f" Dec 08 21:42:32 crc kubenswrapper[4791]: I1208 21:42:32.487292 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-7cwhm"] Dec 08 21:42:32 crc kubenswrapper[4791]: I1208 21:42:32.498174 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-7cwhm"] Dec 08 21:42:33 crc kubenswrapper[4791]: I1208 21:42:33.394161 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-pmmmw" Dec 08 21:42:33 crc kubenswrapper[4791]: I1208 21:42:33.394578 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-pmmmw" Dec 08 21:42:33 crc kubenswrapper[4791]: I1208 21:42:33.450880 4791 generic.go:334] "Generic (PLEG): container finished" podID="f5e36e4f-4023-4a5a-9ae3-34cde7af452d" containerID="f152d0c1aae53adec81b09eb831ae39d9dfc33aa763d64eb2b80a69b9cdddb0f" exitCode=0 Dec 08 21:42:33 crc kubenswrapper[4791]: I1208 21:42:33.450950 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-xkfgv" event={"ID":"f5e36e4f-4023-4a5a-9ae3-34cde7af452d","Type":"ContainerDied","Data":"f152d0c1aae53adec81b09eb831ae39d9dfc33aa763d64eb2b80a69b9cdddb0f"} Dec 08 21:42:33 crc kubenswrapper[4791]: I1208 21:42:33.611532 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4ad73e16-b73d-48d2-9968-934d17c0dea1" path="/var/lib/kubelet/pods/4ad73e16-b73d-48d2-9968-934d17c0dea1/volumes" Dec 08 21:42:34 crc kubenswrapper[4791]: I1208 21:42:34.437279 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-pmmmw" podUID="d9780822-bcb2-4576-a005-16807709c69b" containerName="registry-server" probeResult="failure" output=< Dec 08 21:42:34 crc kubenswrapper[4791]: timeout: failed to connect service ":50051" within 1s Dec 08 21:42:34 crc kubenswrapper[4791]: > Dec 08 21:42:34 crc kubenswrapper[4791]: I1208 21:42:34.463641 4791 generic.go:334] "Generic (PLEG): container finished" podID="e74124c9-f94a-4168-a9b1-dafbcb9e0f70" containerID="0e8bd64f021b879cf2a2348f728c3648af903cf50353439466651a66f3406f8a" exitCode=0 Dec 08 21:42:34 crc kubenswrapper[4791]: I1208 21:42:34.463676 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-rrtnb" event={"ID":"e74124c9-f94a-4168-a9b1-dafbcb9e0f70","Type":"ContainerDied","Data":"0e8bd64f021b879cf2a2348f728c3648af903cf50353439466651a66f3406f8a"} Dec 08 21:42:34 crc kubenswrapper[4791]: I1208 21:42:34.925347 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-xkfgv" Dec 08 21:42:34 crc kubenswrapper[4791]: I1208 21:42:34.973741 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5e36e4f-4023-4a5a-9ae3-34cde7af452d-config-data\") pod \"f5e36e4f-4023-4a5a-9ae3-34cde7af452d\" (UID: \"f5e36e4f-4023-4a5a-9ae3-34cde7af452d\") " Dec 08 21:42:34 crc kubenswrapper[4791]: I1208 21:42:34.973963 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cbx9z\" (UniqueName: \"kubernetes.io/projected/f5e36e4f-4023-4a5a-9ae3-34cde7af452d-kube-api-access-cbx9z\") pod \"f5e36e4f-4023-4a5a-9ae3-34cde7af452d\" (UID: \"f5e36e4f-4023-4a5a-9ae3-34cde7af452d\") " Dec 08 21:42:34 crc kubenswrapper[4791]: I1208 21:42:34.974035 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5e36e4f-4023-4a5a-9ae3-34cde7af452d-combined-ca-bundle\") pod \"f5e36e4f-4023-4a5a-9ae3-34cde7af452d\" (UID: \"f5e36e4f-4023-4a5a-9ae3-34cde7af452d\") " Dec 08 21:42:34 crc kubenswrapper[4791]: I1208 21:42:34.979115 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5e36e4f-4023-4a5a-9ae3-34cde7af452d-kube-api-access-cbx9z" (OuterVolumeSpecName: "kube-api-access-cbx9z") pod "f5e36e4f-4023-4a5a-9ae3-34cde7af452d" (UID: "f5e36e4f-4023-4a5a-9ae3-34cde7af452d"). InnerVolumeSpecName "kube-api-access-cbx9z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.005997 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5e36e4f-4023-4a5a-9ae3-34cde7af452d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f5e36e4f-4023-4a5a-9ae3-34cde7af452d" (UID: "f5e36e4f-4023-4a5a-9ae3-34cde7af452d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.045541 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5e36e4f-4023-4a5a-9ae3-34cde7af452d-config-data" (OuterVolumeSpecName: "config-data") pod "f5e36e4f-4023-4a5a-9ae3-34cde7af452d" (UID: "f5e36e4f-4023-4a5a-9ae3-34cde7af452d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.076604 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cbx9z\" (UniqueName: \"kubernetes.io/projected/f5e36e4f-4023-4a5a-9ae3-34cde7af452d-kube-api-access-cbx9z\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.076648 4791 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5e36e4f-4023-4a5a-9ae3-34cde7af452d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.076663 4791 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5e36e4f-4023-4a5a-9ae3-34cde7af452d-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.477520 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-xkfgv" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.477864 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-xkfgv" event={"ID":"f5e36e4f-4023-4a5a-9ae3-34cde7af452d","Type":"ContainerDied","Data":"4046cd7082748397b64cc760bf8ef654586420ec848ee17a13fe1dfa7280b53d"} Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.477905 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4046cd7082748397b64cc760bf8ef654586420ec848ee17a13fe1dfa7280b53d" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.821285 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5b868669f-rn55l"] Dec 08 21:42:35 crc kubenswrapper[4791]: E1208 21:42:35.821806 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ad73e16-b73d-48d2-9968-934d17c0dea1" containerName="init" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.821830 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ad73e16-b73d-48d2-9968-934d17c0dea1" containerName="init" Dec 08 21:42:35 crc kubenswrapper[4791]: E1208 21:42:35.821846 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83b4f5fa-551e-4849-baf6-7afb53700f1d" containerName="mariadb-database-create" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.821857 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="83b4f5fa-551e-4849-baf6-7afb53700f1d" containerName="mariadb-database-create" Dec 08 21:42:35 crc kubenswrapper[4791]: E1208 21:42:35.821871 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="944249f8-45ae-4247-b092-54a0a081df4e" containerName="mariadb-account-create-update" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.821880 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="944249f8-45ae-4247-b092-54a0a081df4e" containerName="mariadb-account-create-update" Dec 08 21:42:35 crc kubenswrapper[4791]: E1208 21:42:35.821896 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ad73e16-b73d-48d2-9968-934d17c0dea1" containerName="dnsmasq-dns" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.821902 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ad73e16-b73d-48d2-9968-934d17c0dea1" containerName="dnsmasq-dns" Dec 08 21:42:35 crc kubenswrapper[4791]: E1208 21:42:35.821913 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1085ae8-9862-42a2-9c52-561c82c2e966" containerName="mariadb-account-create-update" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.821919 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1085ae8-9862-42a2-9c52-561c82c2e966" containerName="mariadb-account-create-update" Dec 08 21:42:35 crc kubenswrapper[4791]: E1208 21:42:35.821932 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ceb03ddf-9eca-4760-82da-ef871c8f2af7" containerName="mariadb-database-create" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.821938 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="ceb03ddf-9eca-4760-82da-ef871c8f2af7" containerName="mariadb-database-create" Dec 08 21:42:35 crc kubenswrapper[4791]: E1208 21:42:35.821947 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55f1c8c7-116e-4f7f-9b3e-b94c44b5a755" containerName="mariadb-account-create-update" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.821952 4791 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="55f1c8c7-116e-4f7f-9b3e-b94c44b5a755" containerName="mariadb-account-create-update" Dec 08 21:42:35 crc kubenswrapper[4791]: E1208 21:42:35.821968 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e35cda62-b9bc-4055-b831-2f8beb709d69" containerName="mariadb-database-create" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.821974 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="e35cda62-b9bc-4055-b831-2f8beb709d69" containerName="mariadb-database-create" Dec 08 21:42:35 crc kubenswrapper[4791]: E1208 21:42:35.821998 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5e36e4f-4023-4a5a-9ae3-34cde7af452d" containerName="keystone-db-sync" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.822005 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5e36e4f-4023-4a5a-9ae3-34cde7af452d" containerName="keystone-db-sync" Dec 08 21:42:35 crc kubenswrapper[4791]: E1208 21:42:35.822015 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b317f179-52ca-4d94-bd3a-c9cfd5096839" containerName="mariadb-account-create-update" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.822021 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="b317f179-52ca-4d94-bd3a-c9cfd5096839" containerName="mariadb-account-create-update" Dec 08 21:42:35 crc kubenswrapper[4791]: E1208 21:42:35.822033 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="759af777-0707-490e-87e1-6f15b83fbfa0" containerName="mariadb-database-create" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.822038 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="759af777-0707-490e-87e1-6f15b83fbfa0" containerName="mariadb-database-create" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.822216 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ad73e16-b73d-48d2-9968-934d17c0dea1" containerName="dnsmasq-dns" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.822227 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="e35cda62-b9bc-4055-b831-2f8beb709d69" containerName="mariadb-database-create" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.822239 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1085ae8-9862-42a2-9c52-561c82c2e966" containerName="mariadb-account-create-update" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.822251 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="ceb03ddf-9eca-4760-82da-ef871c8f2af7" containerName="mariadb-database-create" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.822297 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="944249f8-45ae-4247-b092-54a0a081df4e" containerName="mariadb-account-create-update" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.822315 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="759af777-0707-490e-87e1-6f15b83fbfa0" containerName="mariadb-database-create" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.822328 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="83b4f5fa-551e-4849-baf6-7afb53700f1d" containerName="mariadb-database-create" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.822338 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="55f1c8c7-116e-4f7f-9b3e-b94c44b5a755" containerName="mariadb-account-create-update" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.822347 4791 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="f5e36e4f-4023-4a5a-9ae3-34cde7af452d" containerName="keystone-db-sync" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.822357 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="b317f179-52ca-4d94-bd3a-c9cfd5096839" containerName="mariadb-account-create-update" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.823528 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b868669f-rn55l" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.848749 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-5gx5k"] Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.850092 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-5gx5k" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.881097 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.881323 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.885840 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.885891 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.892852 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5c7f1c00-3243-46d2-96d6-1483681a8906-fernet-keys\") pod \"keystone-bootstrap-5gx5k\" (UID: \"5c7f1c00-3243-46d2-96d6-1483681a8906\") " pod="openstack/keystone-bootstrap-5gx5k" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.892920 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c7f1c00-3243-46d2-96d6-1483681a8906-scripts\") pod \"keystone-bootstrap-5gx5k\" (UID: \"5c7f1c00-3243-46d2-96d6-1483681a8906\") " pod="openstack/keystone-bootstrap-5gx5k" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.892956 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/eafc3e7a-7fa4-4151-895c-4181afb453a9-ovsdbserver-sb\") pod \"dnsmasq-dns-5b868669f-rn55l\" (UID: \"eafc3e7a-7fa4-4151-895c-4181afb453a9\") " pod="openstack/dnsmasq-dns-5b868669f-rn55l" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.893002 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/eafc3e7a-7fa4-4151-895c-4181afb453a9-dns-swift-storage-0\") pod \"dnsmasq-dns-5b868669f-rn55l\" (UID: \"eafc3e7a-7fa4-4151-895c-4181afb453a9\") " pod="openstack/dnsmasq-dns-5b868669f-rn55l" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.893021 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-56dvp\" (UniqueName: \"kubernetes.io/projected/eafc3e7a-7fa4-4151-895c-4181afb453a9-kube-api-access-56dvp\") pod \"dnsmasq-dns-5b868669f-rn55l\" (UID: \"eafc3e7a-7fa4-4151-895c-4181afb453a9\") " pod="openstack/dnsmasq-dns-5b868669f-rn55l" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.893114 4791 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/eafc3e7a-7fa4-4151-895c-4181afb453a9-ovsdbserver-nb\") pod \"dnsmasq-dns-5b868669f-rn55l\" (UID: \"eafc3e7a-7fa4-4151-895c-4181afb453a9\") " pod="openstack/dnsmasq-dns-5b868669f-rn55l" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.893230 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c7f1c00-3243-46d2-96d6-1483681a8906-config-data\") pod \"keystone-bootstrap-5gx5k\" (UID: \"5c7f1c00-3243-46d2-96d6-1483681a8906\") " pod="openstack/keystone-bootstrap-5gx5k" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.893338 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eafc3e7a-7fa4-4151-895c-4181afb453a9-dns-svc\") pod \"dnsmasq-dns-5b868669f-rn55l\" (UID: \"eafc3e7a-7fa4-4151-895c-4181afb453a9\") " pod="openstack/dnsmasq-dns-5b868669f-rn55l" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.893380 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c7f1c00-3243-46d2-96d6-1483681a8906-combined-ca-bundle\") pod \"keystone-bootstrap-5gx5k\" (UID: \"5c7f1c00-3243-46d2-96d6-1483681a8906\") " pod="openstack/keystone-bootstrap-5gx5k" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.893566 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5c7f1c00-3243-46d2-96d6-1483681a8906-credential-keys\") pod \"keystone-bootstrap-5gx5k\" (UID: \"5c7f1c00-3243-46d2-96d6-1483681a8906\") " pod="openstack/keystone-bootstrap-5gx5k" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.893653 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eafc3e7a-7fa4-4151-895c-4181afb453a9-config\") pod \"dnsmasq-dns-5b868669f-rn55l\" (UID: \"eafc3e7a-7fa4-4151-895c-4181afb453a9\") " pod="openstack/dnsmasq-dns-5b868669f-rn55l" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.893698 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5bxks\" (UniqueName: \"kubernetes.io/projected/5c7f1c00-3243-46d2-96d6-1483681a8906-kube-api-access-5bxks\") pod \"keystone-bootstrap-5gx5k\" (UID: \"5c7f1c00-3243-46d2-96d6-1483681a8906\") " pod="openstack/keystone-bootstrap-5gx5k" Dec 08 21:42:35 crc kubenswrapper[4791]: I1208 21:42:35.897122 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-kwdwq" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.006827 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5c7f1c00-3243-46d2-96d6-1483681a8906-fernet-keys\") pod \"keystone-bootstrap-5gx5k\" (UID: \"5c7f1c00-3243-46d2-96d6-1483681a8906\") " pod="openstack/keystone-bootstrap-5gx5k" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.006894 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c7f1c00-3243-46d2-96d6-1483681a8906-scripts\") pod \"keystone-bootstrap-5gx5k\" 
(UID: \"5c7f1c00-3243-46d2-96d6-1483681a8906\") " pod="openstack/keystone-bootstrap-5gx5k" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.006924 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/eafc3e7a-7fa4-4151-895c-4181afb453a9-ovsdbserver-sb\") pod \"dnsmasq-dns-5b868669f-rn55l\" (UID: \"eafc3e7a-7fa4-4151-895c-4181afb453a9\") " pod="openstack/dnsmasq-dns-5b868669f-rn55l" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.006978 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/eafc3e7a-7fa4-4151-895c-4181afb453a9-dns-swift-storage-0\") pod \"dnsmasq-dns-5b868669f-rn55l\" (UID: \"eafc3e7a-7fa4-4151-895c-4181afb453a9\") " pod="openstack/dnsmasq-dns-5b868669f-rn55l" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.007004 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-56dvp\" (UniqueName: \"kubernetes.io/projected/eafc3e7a-7fa4-4151-895c-4181afb453a9-kube-api-access-56dvp\") pod \"dnsmasq-dns-5b868669f-rn55l\" (UID: \"eafc3e7a-7fa4-4151-895c-4181afb453a9\") " pod="openstack/dnsmasq-dns-5b868669f-rn55l" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.007029 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/eafc3e7a-7fa4-4151-895c-4181afb453a9-ovsdbserver-nb\") pod \"dnsmasq-dns-5b868669f-rn55l\" (UID: \"eafc3e7a-7fa4-4151-895c-4181afb453a9\") " pod="openstack/dnsmasq-dns-5b868669f-rn55l" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.007050 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c7f1c00-3243-46d2-96d6-1483681a8906-config-data\") pod \"keystone-bootstrap-5gx5k\" (UID: \"5c7f1c00-3243-46d2-96d6-1483681a8906\") " pod="openstack/keystone-bootstrap-5gx5k" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.007081 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eafc3e7a-7fa4-4151-895c-4181afb453a9-dns-svc\") pod \"dnsmasq-dns-5b868669f-rn55l\" (UID: \"eafc3e7a-7fa4-4151-895c-4181afb453a9\") " pod="openstack/dnsmasq-dns-5b868669f-rn55l" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.007105 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c7f1c00-3243-46d2-96d6-1483681a8906-combined-ca-bundle\") pod \"keystone-bootstrap-5gx5k\" (UID: \"5c7f1c00-3243-46d2-96d6-1483681a8906\") " pod="openstack/keystone-bootstrap-5gx5k" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.007152 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5c7f1c00-3243-46d2-96d6-1483681a8906-credential-keys\") pod \"keystone-bootstrap-5gx5k\" (UID: \"5c7f1c00-3243-46d2-96d6-1483681a8906\") " pod="openstack/keystone-bootstrap-5gx5k" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.007187 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eafc3e7a-7fa4-4151-895c-4181afb453a9-config\") pod \"dnsmasq-dns-5b868669f-rn55l\" (UID: \"eafc3e7a-7fa4-4151-895c-4181afb453a9\") " pod="openstack/dnsmasq-dns-5b868669f-rn55l" Dec 08 
21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.007209 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5bxks\" (UniqueName: \"kubernetes.io/projected/5c7f1c00-3243-46d2-96d6-1483681a8906-kube-api-access-5bxks\") pod \"keystone-bootstrap-5gx5k\" (UID: \"5c7f1c00-3243-46d2-96d6-1483681a8906\") " pod="openstack/keystone-bootstrap-5gx5k" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.012825 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b868669f-rn55l"] Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.014299 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/eafc3e7a-7fa4-4151-895c-4181afb453a9-ovsdbserver-nb\") pod \"dnsmasq-dns-5b868669f-rn55l\" (UID: \"eafc3e7a-7fa4-4151-895c-4181afb453a9\") " pod="openstack/dnsmasq-dns-5b868669f-rn55l" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.015520 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/eafc3e7a-7fa4-4151-895c-4181afb453a9-dns-swift-storage-0\") pod \"dnsmasq-dns-5b868669f-rn55l\" (UID: \"eafc3e7a-7fa4-4151-895c-4181afb453a9\") " pod="openstack/dnsmasq-dns-5b868669f-rn55l" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.015639 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/eafc3e7a-7fa4-4151-895c-4181afb453a9-ovsdbserver-sb\") pod \"dnsmasq-dns-5b868669f-rn55l\" (UID: \"eafc3e7a-7fa4-4151-895c-4181afb453a9\") " pod="openstack/dnsmasq-dns-5b868669f-rn55l" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.017067 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eafc3e7a-7fa4-4151-895c-4181afb453a9-dns-svc\") pod \"dnsmasq-dns-5b868669f-rn55l\" (UID: \"eafc3e7a-7fa4-4151-895c-4181afb453a9\") " pod="openstack/dnsmasq-dns-5b868669f-rn55l" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.017859 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eafc3e7a-7fa4-4151-895c-4181afb453a9-config\") pod \"dnsmasq-dns-5b868669f-rn55l\" (UID: \"eafc3e7a-7fa4-4151-895c-4181afb453a9\") " pod="openstack/dnsmasq-dns-5b868669f-rn55l" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.101722 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c7f1c00-3243-46d2-96d6-1483681a8906-combined-ca-bundle\") pod \"keystone-bootstrap-5gx5k\" (UID: \"5c7f1c00-3243-46d2-96d6-1483681a8906\") " pod="openstack/keystone-bootstrap-5gx5k" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.102368 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5c7f1c00-3243-46d2-96d6-1483681a8906-fernet-keys\") pod \"keystone-bootstrap-5gx5k\" (UID: \"5c7f1c00-3243-46d2-96d6-1483681a8906\") " pod="openstack/keystone-bootstrap-5gx5k" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.111992 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c7f1c00-3243-46d2-96d6-1483681a8906-scripts\") pod \"keystone-bootstrap-5gx5k\" (UID: \"5c7f1c00-3243-46d2-96d6-1483681a8906\") " pod="openstack/keystone-bootstrap-5gx5k" Dec 08 21:42:36 crc 
kubenswrapper[4791]: I1208 21:42:36.150647 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5c7f1c00-3243-46d2-96d6-1483681a8906-credential-keys\") pod \"keystone-bootstrap-5gx5k\" (UID: \"5c7f1c00-3243-46d2-96d6-1483681a8906\") " pod="openstack/keystone-bootstrap-5gx5k" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.151638 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c7f1c00-3243-46d2-96d6-1483681a8906-config-data\") pod \"keystone-bootstrap-5gx5k\" (UID: \"5c7f1c00-3243-46d2-96d6-1483681a8906\") " pod="openstack/keystone-bootstrap-5gx5k" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.152577 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5bxks\" (UniqueName: \"kubernetes.io/projected/5c7f1c00-3243-46d2-96d6-1483681a8906-kube-api-access-5bxks\") pod \"keystone-bootstrap-5gx5k\" (UID: \"5c7f1c00-3243-46d2-96d6-1483681a8906\") " pod="openstack/keystone-bootstrap-5gx5k" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.162492 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-56dvp\" (UniqueName: \"kubernetes.io/projected/eafc3e7a-7fa4-4151-895c-4181afb453a9-kube-api-access-56dvp\") pod \"dnsmasq-dns-5b868669f-rn55l\" (UID: \"eafc3e7a-7fa4-4151-895c-4181afb453a9\") " pod="openstack/dnsmasq-dns-5b868669f-rn55l" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.164081 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-5gx5k"] Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.185309 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b868669f-rn55l" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.202936 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-5gx5k" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.248507 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-db-sync-kfqgj"] Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.251415 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-kfqgj" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.264004 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-heat-dockercfg-hbhtr" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.264239 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-config-data" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.299076 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-sync-kfqgj"] Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.332155 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-779pn"] Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.333738 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-779pn" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.345976 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.346165 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-24g9s" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.346269 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.363366 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-779pn"] Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.379160 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e-combined-ca-bundle\") pod \"heat-db-sync-kfqgj\" (UID: \"4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e\") " pod="openstack/heat-db-sync-kfqgj" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.388168 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7dzpg\" (UniqueName: \"kubernetes.io/projected/4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e-kube-api-access-7dzpg\") pod \"heat-db-sync-kfqgj\" (UID: \"4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e\") " pod="openstack/heat-db-sync-kfqgj" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.388422 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e-config-data\") pod \"heat-db-sync-kfqgj\" (UID: \"4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e\") " pod="openstack/heat-db-sync-kfqgj" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.492561 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e-config-data\") pod \"heat-db-sync-kfqgj\" (UID: \"4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e\") " pod="openstack/heat-db-sync-kfqgj" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.494180 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e-combined-ca-bundle\") pod \"heat-db-sync-kfqgj\" (UID: \"4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e\") " pod="openstack/heat-db-sync-kfqgj" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.494326 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gkbg\" (UniqueName: \"kubernetes.io/projected/bd2c18de-3f41-4c59-b400-d96f39d28ec2-kube-api-access-5gkbg\") pod \"neutron-db-sync-779pn\" (UID: \"bd2c18de-3f41-4c59-b400-d96f39d28ec2\") " pod="openstack/neutron-db-sync-779pn" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.495742 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/bd2c18de-3f41-4c59-b400-d96f39d28ec2-config\") pod \"neutron-db-sync-779pn\" (UID: \"bd2c18de-3f41-4c59-b400-d96f39d28ec2\") " pod="openstack/neutron-db-sync-779pn" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.495982 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-7dzpg\" (UniqueName: \"kubernetes.io/projected/4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e-kube-api-access-7dzpg\") pod \"heat-db-sync-kfqgj\" (UID: \"4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e\") " pod="openstack/heat-db-sync-kfqgj" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.496115 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd2c18de-3f41-4c59-b400-d96f39d28ec2-combined-ca-bundle\") pod \"neutron-db-sync-779pn\" (UID: \"bd2c18de-3f41-4c59-b400-d96f39d28ec2\") " pod="openstack/neutron-db-sync-779pn" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.519630 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e-config-data\") pod \"heat-db-sync-kfqgj\" (UID: \"4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e\") " pod="openstack/heat-db-sync-kfqgj" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.522421 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e-combined-ca-bundle\") pod \"heat-db-sync-kfqgj\" (UID: \"4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e\") " pod="openstack/heat-db-sync-kfqgj" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.527777 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7dzpg\" (UniqueName: \"kubernetes.io/projected/4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e-kube-api-access-7dzpg\") pod \"heat-db-sync-kfqgj\" (UID: \"4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e\") " pod="openstack/heat-db-sync-kfqgj" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.532741 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-5vvss"] Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.534600 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-5vvss" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.540790 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.541000 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.541120 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-xbmrz" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.567324 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-hdkvl"] Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.569107 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-hdkvl" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.576403 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-kh27z" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.576734 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.576761 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.595345 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b868669f-rn55l"] Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.607526 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gkbg\" (UniqueName: \"kubernetes.io/projected/bd2c18de-3f41-4c59-b400-d96f39d28ec2-kube-api-access-5gkbg\") pod \"neutron-db-sync-779pn\" (UID: \"bd2c18de-3f41-4c59-b400-d96f39d28ec2\") " pod="openstack/neutron-db-sync-779pn" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.607628 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/bd2c18de-3f41-4c59-b400-d96f39d28ec2-config\") pod \"neutron-db-sync-779pn\" (UID: \"bd2c18de-3f41-4c59-b400-d96f39d28ec2\") " pod="openstack/neutron-db-sync-779pn" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.607697 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd2c18de-3f41-4c59-b400-d96f39d28ec2-combined-ca-bundle\") pod \"neutron-db-sync-779pn\" (UID: \"bd2c18de-3f41-4c59-b400-d96f39d28ec2\") " pod="openstack/neutron-db-sync-779pn" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.617823 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd2c18de-3f41-4c59-b400-d96f39d28ec2-combined-ca-bundle\") pod \"neutron-db-sync-779pn\" (UID: \"bd2c18de-3f41-4c59-b400-d96f39d28ec2\") " pod="openstack/neutron-db-sync-779pn" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.619182 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-sync-kfqgj" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.626847 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/bd2c18de-3f41-4c59-b400-d96f39d28ec2-config\") pod \"neutron-db-sync-779pn\" (UID: \"bd2c18de-3f41-4c59-b400-d96f39d28ec2\") " pod="openstack/neutron-db-sync-779pn" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.629943 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-5vvss"] Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.647534 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gkbg\" (UniqueName: \"kubernetes.io/projected/bd2c18de-3f41-4c59-b400-d96f39d28ec2-kube-api-access-5gkbg\") pod \"neutron-db-sync-779pn\" (UID: \"bd2c18de-3f41-4c59-b400-d96f39d28ec2\") " pod="openstack/neutron-db-sync-779pn" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.669697 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-hdkvl"] Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.685509 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-7srv6"] Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.689622 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-7srv6" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.694382 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-78vjm" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.694766 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.710921 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/242da563-f632-4ba3-be9e-bd7d0376120d-combined-ca-bundle\") pod \"placement-db-sync-hdkvl\" (UID: \"242da563-f632-4ba3-be9e-bd7d0376120d\") " pod="openstack/placement-db-sync-hdkvl" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.711763 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee824c0f-2eaa-4eee-8dcf-f487d9445012-combined-ca-bundle\") pod \"cinder-db-sync-5vvss\" (UID: \"ee824c0f-2eaa-4eee-8dcf-f487d9445012\") " pod="openstack/cinder-db-sync-5vvss" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.714357 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-cf78879c9-swt86"] Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.714880 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/242da563-f632-4ba3-be9e-bd7d0376120d-logs\") pod \"placement-db-sync-hdkvl\" (UID: \"242da563-f632-4ba3-be9e-bd7d0376120d\") " pod="openstack/placement-db-sync-hdkvl" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.714923 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ee824c0f-2eaa-4eee-8dcf-f487d9445012-db-sync-config-data\") pod \"cinder-db-sync-5vvss\" (UID: \"ee824c0f-2eaa-4eee-8dcf-f487d9445012\") " pod="openstack/cinder-db-sync-5vvss" Dec 08 21:42:36 crc 
kubenswrapper[4791]: I1208 21:42:36.714986 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee824c0f-2eaa-4eee-8dcf-f487d9445012-config-data\") pod \"cinder-db-sync-5vvss\" (UID: \"ee824c0f-2eaa-4eee-8dcf-f487d9445012\") " pod="openstack/cinder-db-sync-5vvss" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.715148 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/242da563-f632-4ba3-be9e-bd7d0376120d-scripts\") pod \"placement-db-sync-hdkvl\" (UID: \"242da563-f632-4ba3-be9e-bd7d0376120d\") " pod="openstack/placement-db-sync-hdkvl" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.718679 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cf78879c9-swt86" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.726133 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xbpxg\" (UniqueName: \"kubernetes.io/projected/ee824c0f-2eaa-4eee-8dcf-f487d9445012-kube-api-access-xbpxg\") pod \"cinder-db-sync-5vvss\" (UID: \"ee824c0f-2eaa-4eee-8dcf-f487d9445012\") " pod="openstack/cinder-db-sync-5vvss" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.726244 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/242da563-f632-4ba3-be9e-bd7d0376120d-config-data\") pod \"placement-db-sync-hdkvl\" (UID: \"242da563-f632-4ba3-be9e-bd7d0376120d\") " pod="openstack/placement-db-sync-hdkvl" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.726328 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ee824c0f-2eaa-4eee-8dcf-f487d9445012-etc-machine-id\") pod \"cinder-db-sync-5vvss\" (UID: \"ee824c0f-2eaa-4eee-8dcf-f487d9445012\") " pod="openstack/cinder-db-sync-5vvss" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.726390 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8gj7v\" (UniqueName: \"kubernetes.io/projected/242da563-f632-4ba3-be9e-bd7d0376120d-kube-api-access-8gj7v\") pod \"placement-db-sync-hdkvl\" (UID: \"242da563-f632-4ba3-be9e-bd7d0376120d\") " pod="openstack/placement-db-sync-hdkvl" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.726464 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee824c0f-2eaa-4eee-8dcf-f487d9445012-scripts\") pod \"cinder-db-sync-5vvss\" (UID: \"ee824c0f-2eaa-4eee-8dcf-f487d9445012\") " pod="openstack/cinder-db-sync-5vvss" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.728089 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-7srv6"] Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.737694 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-779pn" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.740771 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cf78879c9-swt86"] Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.745721 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-rrtnb" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.831678 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4a0201ee-a799-4831-bf05-0804b6f413b9-ovsdbserver-sb\") pod \"dnsmasq-dns-cf78879c9-swt86\" (UID: \"4a0201ee-a799-4831-bf05-0804b6f413b9\") " pod="openstack/dnsmasq-dns-cf78879c9-swt86" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.831751 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee824c0f-2eaa-4eee-8dcf-f487d9445012-combined-ca-bundle\") pod \"cinder-db-sync-5vvss\" (UID: \"ee824c0f-2eaa-4eee-8dcf-f487d9445012\") " pod="openstack/cinder-db-sync-5vvss" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.831807 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a0201ee-a799-4831-bf05-0804b6f413b9-config\") pod \"dnsmasq-dns-cf78879c9-swt86\" (UID: \"4a0201ee-a799-4831-bf05-0804b6f413b9\") " pod="openstack/dnsmasq-dns-cf78879c9-swt86" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.831834 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/242da563-f632-4ba3-be9e-bd7d0376120d-logs\") pod \"placement-db-sync-hdkvl\" (UID: \"242da563-f632-4ba3-be9e-bd7d0376120d\") " pod="openstack/placement-db-sync-hdkvl" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.831853 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ee824c0f-2eaa-4eee-8dcf-f487d9445012-db-sync-config-data\") pod \"cinder-db-sync-5vvss\" (UID: \"ee824c0f-2eaa-4eee-8dcf-f487d9445012\") " pod="openstack/cinder-db-sync-5vvss" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.831874 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee824c0f-2eaa-4eee-8dcf-f487d9445012-config-data\") pod \"cinder-db-sync-5vvss\" (UID: \"ee824c0f-2eaa-4eee-8dcf-f487d9445012\") " pod="openstack/cinder-db-sync-5vvss" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.831908 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z7slx\" (UniqueName: \"kubernetes.io/projected/d9f87323-3041-444f-b26d-c76871bd426f-kube-api-access-z7slx\") pod \"barbican-db-sync-7srv6\" (UID: \"d9f87323-3041-444f-b26d-c76871bd426f\") " pod="openstack/barbican-db-sync-7srv6" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.831937 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/242da563-f632-4ba3-be9e-bd7d0376120d-scripts\") pod \"placement-db-sync-hdkvl\" (UID: \"242da563-f632-4ba3-be9e-bd7d0376120d\") " pod="openstack/placement-db-sync-hdkvl" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.831957 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4a0201ee-a799-4831-bf05-0804b6f413b9-dns-svc\") pod \"dnsmasq-dns-cf78879c9-swt86\" (UID: \"4a0201ee-a799-4831-bf05-0804b6f413b9\") " pod="openstack/dnsmasq-dns-cf78879c9-swt86" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 
21:42:36.831981 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xbpxg\" (UniqueName: \"kubernetes.io/projected/ee824c0f-2eaa-4eee-8dcf-f487d9445012-kube-api-access-xbpxg\") pod \"cinder-db-sync-5vvss\" (UID: \"ee824c0f-2eaa-4eee-8dcf-f487d9445012\") " pod="openstack/cinder-db-sync-5vvss" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.832009 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/242da563-f632-4ba3-be9e-bd7d0376120d-config-data\") pod \"placement-db-sync-hdkvl\" (UID: \"242da563-f632-4ba3-be9e-bd7d0376120d\") " pod="openstack/placement-db-sync-hdkvl" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.832037 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ee824c0f-2eaa-4eee-8dcf-f487d9445012-etc-machine-id\") pod \"cinder-db-sync-5vvss\" (UID: \"ee824c0f-2eaa-4eee-8dcf-f487d9445012\") " pod="openstack/cinder-db-sync-5vvss" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.832062 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8gj7v\" (UniqueName: \"kubernetes.io/projected/242da563-f632-4ba3-be9e-bd7d0376120d-kube-api-access-8gj7v\") pod \"placement-db-sync-hdkvl\" (UID: \"242da563-f632-4ba3-be9e-bd7d0376120d\") " pod="openstack/placement-db-sync-hdkvl" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.832104 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5tgz2\" (UniqueName: \"kubernetes.io/projected/4a0201ee-a799-4831-bf05-0804b6f413b9-kube-api-access-5tgz2\") pod \"dnsmasq-dns-cf78879c9-swt86\" (UID: \"4a0201ee-a799-4831-bf05-0804b6f413b9\") " pod="openstack/dnsmasq-dns-cf78879c9-swt86" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.832135 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4a0201ee-a799-4831-bf05-0804b6f413b9-ovsdbserver-nb\") pod \"dnsmasq-dns-cf78879c9-swt86\" (UID: \"4a0201ee-a799-4831-bf05-0804b6f413b9\") " pod="openstack/dnsmasq-dns-cf78879c9-swt86" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.832158 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee824c0f-2eaa-4eee-8dcf-f487d9445012-scripts\") pod \"cinder-db-sync-5vvss\" (UID: \"ee824c0f-2eaa-4eee-8dcf-f487d9445012\") " pod="openstack/cinder-db-sync-5vvss" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.832196 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9f87323-3041-444f-b26d-c76871bd426f-combined-ca-bundle\") pod \"barbican-db-sync-7srv6\" (UID: \"d9f87323-3041-444f-b26d-c76871bd426f\") " pod="openstack/barbican-db-sync-7srv6" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.832244 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d9f87323-3041-444f-b26d-c76871bd426f-db-sync-config-data\") pod \"barbican-db-sync-7srv6\" (UID: \"d9f87323-3041-444f-b26d-c76871bd426f\") " pod="openstack/barbican-db-sync-7srv6" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.832262 4791 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4a0201ee-a799-4831-bf05-0804b6f413b9-dns-swift-storage-0\") pod \"dnsmasq-dns-cf78879c9-swt86\" (UID: \"4a0201ee-a799-4831-bf05-0804b6f413b9\") " pod="openstack/dnsmasq-dns-cf78879c9-swt86" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.832285 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/242da563-f632-4ba3-be9e-bd7d0376120d-combined-ca-bundle\") pod \"placement-db-sync-hdkvl\" (UID: \"242da563-f632-4ba3-be9e-bd7d0376120d\") " pod="openstack/placement-db-sync-hdkvl" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.839629 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/242da563-f632-4ba3-be9e-bd7d0376120d-logs\") pod \"placement-db-sync-hdkvl\" (UID: \"242da563-f632-4ba3-be9e-bd7d0376120d\") " pod="openstack/placement-db-sync-hdkvl" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.840140 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ee824c0f-2eaa-4eee-8dcf-f487d9445012-etc-machine-id\") pod \"cinder-db-sync-5vvss\" (UID: \"ee824c0f-2eaa-4eee-8dcf-f487d9445012\") " pod="openstack/cinder-db-sync-5vvss" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.851474 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/242da563-f632-4ba3-be9e-bd7d0376120d-combined-ca-bundle\") pod \"placement-db-sync-hdkvl\" (UID: \"242da563-f632-4ba3-be9e-bd7d0376120d\") " pod="openstack/placement-db-sync-hdkvl" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.852078 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee824c0f-2eaa-4eee-8dcf-f487d9445012-combined-ca-bundle\") pod \"cinder-db-sync-5vvss\" (UID: \"ee824c0f-2eaa-4eee-8dcf-f487d9445012\") " pod="openstack/cinder-db-sync-5vvss" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.852133 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee824c0f-2eaa-4eee-8dcf-f487d9445012-scripts\") pod \"cinder-db-sync-5vvss\" (UID: \"ee824c0f-2eaa-4eee-8dcf-f487d9445012\") " pod="openstack/cinder-db-sync-5vvss" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.852162 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/242da563-f632-4ba3-be9e-bd7d0376120d-scripts\") pod \"placement-db-sync-hdkvl\" (UID: \"242da563-f632-4ba3-be9e-bd7d0376120d\") " pod="openstack/placement-db-sync-hdkvl" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.857807 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ee824c0f-2eaa-4eee-8dcf-f487d9445012-db-sync-config-data\") pod \"cinder-db-sync-5vvss\" (UID: \"ee824c0f-2eaa-4eee-8dcf-f487d9445012\") " pod="openstack/cinder-db-sync-5vvss" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.858504 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee824c0f-2eaa-4eee-8dcf-f487d9445012-config-data\") pod \"cinder-db-sync-5vvss\" (UID: 
\"ee824c0f-2eaa-4eee-8dcf-f487d9445012\") " pod="openstack/cinder-db-sync-5vvss" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.864088 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xbpxg\" (UniqueName: \"kubernetes.io/projected/ee824c0f-2eaa-4eee-8dcf-f487d9445012-kube-api-access-xbpxg\") pod \"cinder-db-sync-5vvss\" (UID: \"ee824c0f-2eaa-4eee-8dcf-f487d9445012\") " pod="openstack/cinder-db-sync-5vvss" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.879815 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-5vvss" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.881015 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8gj7v\" (UniqueName: \"kubernetes.io/projected/242da563-f632-4ba3-be9e-bd7d0376120d-kube-api-access-8gj7v\") pod \"placement-db-sync-hdkvl\" (UID: \"242da563-f632-4ba3-be9e-bd7d0376120d\") " pod="openstack/placement-db-sync-hdkvl" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.881616 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/242da563-f632-4ba3-be9e-bd7d0376120d-config-data\") pod \"placement-db-sync-hdkvl\" (UID: \"242da563-f632-4ba3-be9e-bd7d0376120d\") " pod="openstack/placement-db-sync-hdkvl" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.915644 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-hdkvl" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.940454 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e74124c9-f94a-4168-a9b1-dafbcb9e0f70-config-data\") pod \"e74124c9-f94a-4168-a9b1-dafbcb9e0f70\" (UID: \"e74124c9-f94a-4168-a9b1-dafbcb9e0f70\") " Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.940541 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e74124c9-f94a-4168-a9b1-dafbcb9e0f70-db-sync-config-data\") pod \"e74124c9-f94a-4168-a9b1-dafbcb9e0f70\" (UID: \"e74124c9-f94a-4168-a9b1-dafbcb9e0f70\") " Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.940670 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e74124c9-f94a-4168-a9b1-dafbcb9e0f70-combined-ca-bundle\") pod \"e74124c9-f94a-4168-a9b1-dafbcb9e0f70\" (UID: \"e74124c9-f94a-4168-a9b1-dafbcb9e0f70\") " Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.940733 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zdvfx\" (UniqueName: \"kubernetes.io/projected/e74124c9-f94a-4168-a9b1-dafbcb9e0f70-kube-api-access-zdvfx\") pod \"e74124c9-f94a-4168-a9b1-dafbcb9e0f70\" (UID: \"e74124c9-f94a-4168-a9b1-dafbcb9e0f70\") " Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.941132 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4a0201ee-a799-4831-bf05-0804b6f413b9-ovsdbserver-sb\") pod \"dnsmasq-dns-cf78879c9-swt86\" (UID: \"4a0201ee-a799-4831-bf05-0804b6f413b9\") " pod="openstack/dnsmasq-dns-cf78879c9-swt86" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.941187 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/4a0201ee-a799-4831-bf05-0804b6f413b9-config\") pod \"dnsmasq-dns-cf78879c9-swt86\" (UID: \"4a0201ee-a799-4831-bf05-0804b6f413b9\") " pod="openstack/dnsmasq-dns-cf78879c9-swt86" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.941243 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z7slx\" (UniqueName: \"kubernetes.io/projected/d9f87323-3041-444f-b26d-c76871bd426f-kube-api-access-z7slx\") pod \"barbican-db-sync-7srv6\" (UID: \"d9f87323-3041-444f-b26d-c76871bd426f\") " pod="openstack/barbican-db-sync-7srv6" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.941278 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4a0201ee-a799-4831-bf05-0804b6f413b9-dns-svc\") pod \"dnsmasq-dns-cf78879c9-swt86\" (UID: \"4a0201ee-a799-4831-bf05-0804b6f413b9\") " pod="openstack/dnsmasq-dns-cf78879c9-swt86" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.941437 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5tgz2\" (UniqueName: \"kubernetes.io/projected/4a0201ee-a799-4831-bf05-0804b6f413b9-kube-api-access-5tgz2\") pod \"dnsmasq-dns-cf78879c9-swt86\" (UID: \"4a0201ee-a799-4831-bf05-0804b6f413b9\") " pod="openstack/dnsmasq-dns-cf78879c9-swt86" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.941479 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4a0201ee-a799-4831-bf05-0804b6f413b9-ovsdbserver-nb\") pod \"dnsmasq-dns-cf78879c9-swt86\" (UID: \"4a0201ee-a799-4831-bf05-0804b6f413b9\") " pod="openstack/dnsmasq-dns-cf78879c9-swt86" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.941513 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9f87323-3041-444f-b26d-c76871bd426f-combined-ca-bundle\") pod \"barbican-db-sync-7srv6\" (UID: \"d9f87323-3041-444f-b26d-c76871bd426f\") " pod="openstack/barbican-db-sync-7srv6" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.941568 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d9f87323-3041-444f-b26d-c76871bd426f-db-sync-config-data\") pod \"barbican-db-sync-7srv6\" (UID: \"d9f87323-3041-444f-b26d-c76871bd426f\") " pod="openstack/barbican-db-sync-7srv6" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.941588 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4a0201ee-a799-4831-bf05-0804b6f413b9-dns-swift-storage-0\") pod \"dnsmasq-dns-cf78879c9-swt86\" (UID: \"4a0201ee-a799-4831-bf05-0804b6f413b9\") " pod="openstack/dnsmasq-dns-cf78879c9-swt86" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.948737 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4a0201ee-a799-4831-bf05-0804b6f413b9-ovsdbserver-sb\") pod \"dnsmasq-dns-cf78879c9-swt86\" (UID: \"4a0201ee-a799-4831-bf05-0804b6f413b9\") " pod="openstack/dnsmasq-dns-cf78879c9-swt86" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.949291 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: 
\"kubernetes.io/configmap/4a0201ee-a799-4831-bf05-0804b6f413b9-dns-swift-storage-0\") pod \"dnsmasq-dns-cf78879c9-swt86\" (UID: \"4a0201ee-a799-4831-bf05-0804b6f413b9\") " pod="openstack/dnsmasq-dns-cf78879c9-swt86" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.950420 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4a0201ee-a799-4831-bf05-0804b6f413b9-ovsdbserver-nb\") pod \"dnsmasq-dns-cf78879c9-swt86\" (UID: \"4a0201ee-a799-4831-bf05-0804b6f413b9\") " pod="openstack/dnsmasq-dns-cf78879c9-swt86" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.951118 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4a0201ee-a799-4831-bf05-0804b6f413b9-dns-svc\") pod \"dnsmasq-dns-cf78879c9-swt86\" (UID: \"4a0201ee-a799-4831-bf05-0804b6f413b9\") " pod="openstack/dnsmasq-dns-cf78879c9-swt86" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.960096 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e74124c9-f94a-4168-a9b1-dafbcb9e0f70-kube-api-access-zdvfx" (OuterVolumeSpecName: "kube-api-access-zdvfx") pod "e74124c9-f94a-4168-a9b1-dafbcb9e0f70" (UID: "e74124c9-f94a-4168-a9b1-dafbcb9e0f70"). InnerVolumeSpecName "kube-api-access-zdvfx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.964778 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5tgz2\" (UniqueName: \"kubernetes.io/projected/4a0201ee-a799-4831-bf05-0804b6f413b9-kube-api-access-5tgz2\") pod \"dnsmasq-dns-cf78879c9-swt86\" (UID: \"4a0201ee-a799-4831-bf05-0804b6f413b9\") " pod="openstack/dnsmasq-dns-cf78879c9-swt86" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.965271 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a0201ee-a799-4831-bf05-0804b6f413b9-config\") pod \"dnsmasq-dns-cf78879c9-swt86\" (UID: \"4a0201ee-a799-4831-bf05-0804b6f413b9\") " pod="openstack/dnsmasq-dns-cf78879c9-swt86" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.975700 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d9f87323-3041-444f-b26d-c76871bd426f-db-sync-config-data\") pod \"barbican-db-sync-7srv6\" (UID: \"d9f87323-3041-444f-b26d-c76871bd426f\") " pod="openstack/barbican-db-sync-7srv6" Dec 08 21:42:36 crc kubenswrapper[4791]: I1208 21:42:36.987321 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z7slx\" (UniqueName: \"kubernetes.io/projected/d9f87323-3041-444f-b26d-c76871bd426f-kube-api-access-z7slx\") pod \"barbican-db-sync-7srv6\" (UID: \"d9f87323-3041-444f-b26d-c76871bd426f\") " pod="openstack/barbican-db-sync-7srv6" Dec 08 21:42:37 crc kubenswrapper[4791]: I1208 21:42:36.995425 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e74124c9-f94a-4168-a9b1-dafbcb9e0f70-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "e74124c9-f94a-4168-a9b1-dafbcb9e0f70" (UID: "e74124c9-f94a-4168-a9b1-dafbcb9e0f70"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:42:37 crc kubenswrapper[4791]: I1208 21:42:37.004144 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9f87323-3041-444f-b26d-c76871bd426f-combined-ca-bundle\") pod \"barbican-db-sync-7srv6\" (UID: \"d9f87323-3041-444f-b26d-c76871bd426f\") " pod="openstack/barbican-db-sync-7srv6" Dec 08 21:42:37 crc kubenswrapper[4791]: I1208 21:42:37.044756 4791 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e74124c9-f94a-4168-a9b1-dafbcb9e0f70-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:37 crc kubenswrapper[4791]: I1208 21:42:37.044857 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zdvfx\" (UniqueName: \"kubernetes.io/projected/e74124c9-f94a-4168-a9b1-dafbcb9e0f70-kube-api-access-zdvfx\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:37 crc kubenswrapper[4791]: I1208 21:42:37.076912 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e74124c9-f94a-4168-a9b1-dafbcb9e0f70-config-data" (OuterVolumeSpecName: "config-data") pod "e74124c9-f94a-4168-a9b1-dafbcb9e0f70" (UID: "e74124c9-f94a-4168-a9b1-dafbcb9e0f70"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:42:37 crc kubenswrapper[4791]: I1208 21:42:37.077717 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-7srv6" Dec 08 21:42:37 crc kubenswrapper[4791]: I1208 21:42:37.079347 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cf78879c9-swt86" Dec 08 21:42:37 crc kubenswrapper[4791]: I1208 21:42:37.089952 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b868669f-rn55l"] Dec 08 21:42:37 crc kubenswrapper[4791]: I1208 21:42:37.122685 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e74124c9-f94a-4168-a9b1-dafbcb9e0f70-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e74124c9-f94a-4168-a9b1-dafbcb9e0f70" (UID: "e74124c9-f94a-4168-a9b1-dafbcb9e0f70"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:42:37 crc kubenswrapper[4791]: I1208 21:42:37.149402 4791 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e74124c9-f94a-4168-a9b1-dafbcb9e0f70-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:37 crc kubenswrapper[4791]: I1208 21:42:37.149442 4791 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e74124c9-f94a-4168-a9b1-dafbcb9e0f70-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:37 crc kubenswrapper[4791]: I1208 21:42:37.258016 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-5gx5k"] Dec 08 21:42:37 crc kubenswrapper[4791]: I1208 21:42:37.545183 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-rrtnb" event={"ID":"e74124c9-f94a-4168-a9b1-dafbcb9e0f70","Type":"ContainerDied","Data":"79afbc79ac0b0ca148d93cb10ac7591fc528c93a4a6822e3273e17fc48ec3ce9"} Dec 08 21:42:37 crc kubenswrapper[4791]: I1208 21:42:37.545231 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="79afbc79ac0b0ca148d93cb10ac7591fc528c93a4a6822e3273e17fc48ec3ce9" Dec 08 21:42:37 crc kubenswrapper[4791]: I1208 21:42:37.545269 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-rrtnb" Dec 08 21:42:37 crc kubenswrapper[4791]: I1208 21:42:37.548301 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-779pn"] Dec 08 21:42:37 crc kubenswrapper[4791]: I1208 21:42:37.653910 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-5vvss"] Dec 08 21:42:37 crc kubenswrapper[4791]: I1208 21:42:37.670429 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-hdkvl"] Dec 08 21:42:37 crc kubenswrapper[4791]: I1208 21:42:37.800964 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-sync-kfqgj"] Dec 08 21:42:38 crc kubenswrapper[4791]: W1208 21:42:38.264201 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeafc3e7a_7fa4_4151_895c_4181afb453a9.slice/crio-546db1a784a65abe3c7f38bda833fb040f1bd84cd83ce95673a2651b5074c1cf WatchSource:0}: Error finding container 546db1a784a65abe3c7f38bda833fb040f1bd84cd83ce95673a2651b5074c1cf: Status 404 returned error can't find the container with id 546db1a784a65abe3c7f38bda833fb040f1bd84cd83ce95673a2651b5074c1cf Dec 08 21:42:38 crc kubenswrapper[4791]: I1208 21:42:38.419013 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-cf78879c9-swt86"] Dec 08 21:42:38 crc kubenswrapper[4791]: I1208 21:42:38.511426 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-xlwz8"] Dec 08 21:42:38 crc kubenswrapper[4791]: E1208 21:42:38.518977 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e74124c9-f94a-4168-a9b1-dafbcb9e0f70" containerName="glance-db-sync" Dec 08 21:42:38 crc kubenswrapper[4791]: I1208 21:42:38.519025 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="e74124c9-f94a-4168-a9b1-dafbcb9e0f70" containerName="glance-db-sync" Dec 08 21:42:38 crc kubenswrapper[4791]: I1208 21:42:38.519423 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="e74124c9-f94a-4168-a9b1-dafbcb9e0f70" containerName="glance-db-sync" Dec 08 21:42:38 
crc kubenswrapper[4791]: I1208 21:42:38.520605 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56df8fb6b7-xlwz8" Dec 08 21:42:38 crc kubenswrapper[4791]: I1208 21:42:38.548834 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-xlwz8"] Dec 08 21:42:38 crc kubenswrapper[4791]: I1208 21:42:38.624508 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-779pn" event={"ID":"bd2c18de-3f41-4c59-b400-d96f39d28ec2","Type":"ContainerStarted","Data":"36eba8f340c164f219eb19f43a6906f552dca725b263c2866ccc07a2b3afe9c1"} Dec 08 21:42:38 crc kubenswrapper[4791]: I1208 21:42:38.652614 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-hdkvl" event={"ID":"242da563-f632-4ba3-be9e-bd7d0376120d","Type":"ContainerStarted","Data":"a00910334bd2ef97d82095a9fe0ba57c1ba1844e2983157a20bda02db6d6486a"} Dec 08 21:42:38 crc kubenswrapper[4791]: I1208 21:42:38.669764 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-kfqgj" event={"ID":"4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e","Type":"ContainerStarted","Data":"745e27ac504a5c4c3d5a5f531469d4ba4667837ea90a0294f9ae31b72ef5595a"} Dec 08 21:42:38 crc kubenswrapper[4791]: I1208 21:42:38.677603 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-5gx5k" event={"ID":"5c7f1c00-3243-46d2-96d6-1483681a8906","Type":"ContainerStarted","Data":"10176e523f97f54727988bde31ade6d3184d0cf0b78f51c87c615eb5b75945b3"} Dec 08 21:42:38 crc kubenswrapper[4791]: I1208 21:42:38.694501 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/74852b39-364a-40e3-8ed8-a24178ee0403-ovsdbserver-sb\") pod \"dnsmasq-dns-56df8fb6b7-xlwz8\" (UID: \"74852b39-364a-40e3-8ed8-a24178ee0403\") " pod="openstack/dnsmasq-dns-56df8fb6b7-xlwz8" Dec 08 21:42:38 crc kubenswrapper[4791]: I1208 21:42:38.694638 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/74852b39-364a-40e3-8ed8-a24178ee0403-ovsdbserver-nb\") pod \"dnsmasq-dns-56df8fb6b7-xlwz8\" (UID: \"74852b39-364a-40e3-8ed8-a24178ee0403\") " pod="openstack/dnsmasq-dns-56df8fb6b7-xlwz8" Dec 08 21:42:38 crc kubenswrapper[4791]: I1208 21:42:38.694692 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/74852b39-364a-40e3-8ed8-a24178ee0403-config\") pod \"dnsmasq-dns-56df8fb6b7-xlwz8\" (UID: \"74852b39-364a-40e3-8ed8-a24178ee0403\") " pod="openstack/dnsmasq-dns-56df8fb6b7-xlwz8" Dec 08 21:42:38 crc kubenswrapper[4791]: I1208 21:42:38.698998 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/74852b39-364a-40e3-8ed8-a24178ee0403-dns-svc\") pod \"dnsmasq-dns-56df8fb6b7-xlwz8\" (UID: \"74852b39-364a-40e3-8ed8-a24178ee0403\") " pod="openstack/dnsmasq-dns-56df8fb6b7-xlwz8" Dec 08 21:42:38 crc kubenswrapper[4791]: I1208 21:42:38.699491 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m849h\" (UniqueName: \"kubernetes.io/projected/74852b39-364a-40e3-8ed8-a24178ee0403-kube-api-access-m849h\") pod \"dnsmasq-dns-56df8fb6b7-xlwz8\" (UID: \"74852b39-364a-40e3-8ed8-a24178ee0403\") " 
pod="openstack/dnsmasq-dns-56df8fb6b7-xlwz8" Dec 08 21:42:38 crc kubenswrapper[4791]: I1208 21:42:38.699582 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/74852b39-364a-40e3-8ed8-a24178ee0403-dns-swift-storage-0\") pod \"dnsmasq-dns-56df8fb6b7-xlwz8\" (UID: \"74852b39-364a-40e3-8ed8-a24178ee0403\") " pod="openstack/dnsmasq-dns-56df8fb6b7-xlwz8" Dec 08 21:42:38 crc kubenswrapper[4791]: I1208 21:42:38.711571 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-5vvss" event={"ID":"ee824c0f-2eaa-4eee-8dcf-f487d9445012","Type":"ContainerStarted","Data":"f9475771d78b43284416cad40c760485d5ca6cf6ca45e18bcf03ccf08450acab"} Dec 08 21:42:38 crc kubenswrapper[4791]: I1208 21:42:38.717928 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b868669f-rn55l" event={"ID":"eafc3e7a-7fa4-4151-895c-4181afb453a9","Type":"ContainerStarted","Data":"546db1a784a65abe3c7f38bda833fb040f1bd84cd83ce95673a2651b5074c1cf"} Dec 08 21:42:38 crc kubenswrapper[4791]: I1208 21:42:38.805077 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m849h\" (UniqueName: \"kubernetes.io/projected/74852b39-364a-40e3-8ed8-a24178ee0403-kube-api-access-m849h\") pod \"dnsmasq-dns-56df8fb6b7-xlwz8\" (UID: \"74852b39-364a-40e3-8ed8-a24178ee0403\") " pod="openstack/dnsmasq-dns-56df8fb6b7-xlwz8" Dec 08 21:42:38 crc kubenswrapper[4791]: I1208 21:42:38.805896 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/74852b39-364a-40e3-8ed8-a24178ee0403-dns-swift-storage-0\") pod \"dnsmasq-dns-56df8fb6b7-xlwz8\" (UID: \"74852b39-364a-40e3-8ed8-a24178ee0403\") " pod="openstack/dnsmasq-dns-56df8fb6b7-xlwz8" Dec 08 21:42:38 crc kubenswrapper[4791]: I1208 21:42:38.806139 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/74852b39-364a-40e3-8ed8-a24178ee0403-ovsdbserver-sb\") pod \"dnsmasq-dns-56df8fb6b7-xlwz8\" (UID: \"74852b39-364a-40e3-8ed8-a24178ee0403\") " pod="openstack/dnsmasq-dns-56df8fb6b7-xlwz8" Dec 08 21:42:38 crc kubenswrapper[4791]: I1208 21:42:38.806444 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/74852b39-364a-40e3-8ed8-a24178ee0403-ovsdbserver-nb\") pod \"dnsmasq-dns-56df8fb6b7-xlwz8\" (UID: \"74852b39-364a-40e3-8ed8-a24178ee0403\") " pod="openstack/dnsmasq-dns-56df8fb6b7-xlwz8" Dec 08 21:42:38 crc kubenswrapper[4791]: I1208 21:42:38.806661 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/74852b39-364a-40e3-8ed8-a24178ee0403-config\") pod \"dnsmasq-dns-56df8fb6b7-xlwz8\" (UID: \"74852b39-364a-40e3-8ed8-a24178ee0403\") " pod="openstack/dnsmasq-dns-56df8fb6b7-xlwz8" Dec 08 21:42:38 crc kubenswrapper[4791]: I1208 21:42:38.806849 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/74852b39-364a-40e3-8ed8-a24178ee0403-dns-svc\") pod \"dnsmasq-dns-56df8fb6b7-xlwz8\" (UID: \"74852b39-364a-40e3-8ed8-a24178ee0403\") " pod="openstack/dnsmasq-dns-56df8fb6b7-xlwz8" Dec 08 21:42:38 crc kubenswrapper[4791]: I1208 21:42:38.815509 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"dns-svc\" (UniqueName: \"kubernetes.io/configmap/74852b39-364a-40e3-8ed8-a24178ee0403-dns-svc\") pod \"dnsmasq-dns-56df8fb6b7-xlwz8\" (UID: \"74852b39-364a-40e3-8ed8-a24178ee0403\") " pod="openstack/dnsmasq-dns-56df8fb6b7-xlwz8" Dec 08 21:42:38 crc kubenswrapper[4791]: I1208 21:42:38.816074 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/74852b39-364a-40e3-8ed8-a24178ee0403-ovsdbserver-nb\") pod \"dnsmasq-dns-56df8fb6b7-xlwz8\" (UID: \"74852b39-364a-40e3-8ed8-a24178ee0403\") " pod="openstack/dnsmasq-dns-56df8fb6b7-xlwz8" Dec 08 21:42:38 crc kubenswrapper[4791]: I1208 21:42:38.822902 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/74852b39-364a-40e3-8ed8-a24178ee0403-dns-swift-storage-0\") pod \"dnsmasq-dns-56df8fb6b7-xlwz8\" (UID: \"74852b39-364a-40e3-8ed8-a24178ee0403\") " pod="openstack/dnsmasq-dns-56df8fb6b7-xlwz8" Dec 08 21:42:38 crc kubenswrapper[4791]: I1208 21:42:38.823036 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/74852b39-364a-40e3-8ed8-a24178ee0403-config\") pod \"dnsmasq-dns-56df8fb6b7-xlwz8\" (UID: \"74852b39-364a-40e3-8ed8-a24178ee0403\") " pod="openstack/dnsmasq-dns-56df8fb6b7-xlwz8" Dec 08 21:42:38 crc kubenswrapper[4791]: I1208 21:42:38.823479 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/74852b39-364a-40e3-8ed8-a24178ee0403-ovsdbserver-sb\") pod \"dnsmasq-dns-56df8fb6b7-xlwz8\" (UID: \"74852b39-364a-40e3-8ed8-a24178ee0403\") " pod="openstack/dnsmasq-dns-56df8fb6b7-xlwz8" Dec 08 21:42:38 crc kubenswrapper[4791]: I1208 21:42:38.852854 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m849h\" (UniqueName: \"kubernetes.io/projected/74852b39-364a-40e3-8ed8-a24178ee0403-kube-api-access-m849h\") pod \"dnsmasq-dns-56df8fb6b7-xlwz8\" (UID: \"74852b39-364a-40e3-8ed8-a24178ee0403\") " pod="openstack/dnsmasq-dns-56df8fb6b7-xlwz8" Dec 08 21:42:38 crc kubenswrapper[4791]: I1208 21:42:38.882910 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56df8fb6b7-xlwz8" Dec 08 21:42:38 crc kubenswrapper[4791]: I1208 21:42:38.913685 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 08 21:42:38 crc kubenswrapper[4791]: I1208 21:42:38.919470 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 08 21:42:38 crc kubenswrapper[4791]: I1208 21:42:38.934668 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-mzhj6" Dec 08 21:42:38 crc kubenswrapper[4791]: I1208 21:42:38.935388 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 08 21:42:38 crc kubenswrapper[4791]: I1208 21:42:38.935564 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 08 21:42:38 crc kubenswrapper[4791]: I1208 21:42:38.949339 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.013125 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50f29756-1345-4c4c-a9f5-bb817fcf0015-logs\") pod \"glance-default-external-api-0\" (UID: \"50f29756-1345-4c4c-a9f5-bb817fcf0015\") " pod="openstack/glance-default-external-api-0" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.013239 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rztjw\" (UniqueName: \"kubernetes.io/projected/50f29756-1345-4c4c-a9f5-bb817fcf0015-kube-api-access-rztjw\") pod \"glance-default-external-api-0\" (UID: \"50f29756-1345-4c4c-a9f5-bb817fcf0015\") " pod="openstack/glance-default-external-api-0" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.013345 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/50f29756-1345-4c4c-a9f5-bb817fcf0015-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"50f29756-1345-4c4c-a9f5-bb817fcf0015\") " pod="openstack/glance-default-external-api-0" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.013646 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-fd7473b6-a721-4577-93f7-bea6228887e6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fd7473b6-a721-4577-93f7-bea6228887e6\") pod \"glance-default-external-api-0\" (UID: \"50f29756-1345-4c4c-a9f5-bb817fcf0015\") " pod="openstack/glance-default-external-api-0" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.013725 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50f29756-1345-4c4c-a9f5-bb817fcf0015-scripts\") pod \"glance-default-external-api-0\" (UID: \"50f29756-1345-4c4c-a9f5-bb817fcf0015\") " pod="openstack/glance-default-external-api-0" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.013955 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50f29756-1345-4c4c-a9f5-bb817fcf0015-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"50f29756-1345-4c4c-a9f5-bb817fcf0015\") " pod="openstack/glance-default-external-api-0" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.013987 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50f29756-1345-4c4c-a9f5-bb817fcf0015-config-data\") pod \"glance-default-external-api-0\" (UID: 
\"50f29756-1345-4c4c-a9f5-bb817fcf0015\") " pod="openstack/glance-default-external-api-0" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.104260 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.107022 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.109560 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.121025 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50f29756-1345-4c4c-a9f5-bb817fcf0015-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"50f29756-1345-4c4c-a9f5-bb817fcf0015\") " pod="openstack/glance-default-external-api-0" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.121075 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50f29756-1345-4c4c-a9f5-bb817fcf0015-config-data\") pod \"glance-default-external-api-0\" (UID: \"50f29756-1345-4c4c-a9f5-bb817fcf0015\") " pod="openstack/glance-default-external-api-0" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.121115 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50f29756-1345-4c4c-a9f5-bb817fcf0015-logs\") pod \"glance-default-external-api-0\" (UID: \"50f29756-1345-4c4c-a9f5-bb817fcf0015\") " pod="openstack/glance-default-external-api-0" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.121176 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rztjw\" (UniqueName: \"kubernetes.io/projected/50f29756-1345-4c4c-a9f5-bb817fcf0015-kube-api-access-rztjw\") pod \"glance-default-external-api-0\" (UID: \"50f29756-1345-4c4c-a9f5-bb817fcf0015\") " pod="openstack/glance-default-external-api-0" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.123261 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/50f29756-1345-4c4c-a9f5-bb817fcf0015-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"50f29756-1345-4c4c-a9f5-bb817fcf0015\") " pod="openstack/glance-default-external-api-0" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.123385 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-fd7473b6-a721-4577-93f7-bea6228887e6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fd7473b6-a721-4577-93f7-bea6228887e6\") pod \"glance-default-external-api-0\" (UID: \"50f29756-1345-4c4c-a9f5-bb817fcf0015\") " pod="openstack/glance-default-external-api-0" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.123423 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50f29756-1345-4c4c-a9f5-bb817fcf0015-scripts\") pod \"glance-default-external-api-0\" (UID: \"50f29756-1345-4c4c-a9f5-bb817fcf0015\") " pod="openstack/glance-default-external-api-0" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.124144 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/50f29756-1345-4c4c-a9f5-bb817fcf0015-logs\") pod \"glance-default-external-api-0\" (UID: \"50f29756-1345-4c4c-a9f5-bb817fcf0015\") " pod="openstack/glance-default-external-api-0" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.129364 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50f29756-1345-4c4c-a9f5-bb817fcf0015-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"50f29756-1345-4c4c-a9f5-bb817fcf0015\") " pod="openstack/glance-default-external-api-0" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.130334 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/50f29756-1345-4c4c-a9f5-bb817fcf0015-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"50f29756-1345-4c4c-a9f5-bb817fcf0015\") " pod="openstack/glance-default-external-api-0" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.150739 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50f29756-1345-4c4c-a9f5-bb817fcf0015-scripts\") pod \"glance-default-external-api-0\" (UID: \"50f29756-1345-4c4c-a9f5-bb817fcf0015\") " pod="openstack/glance-default-external-api-0" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.151625 4791 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.151744 4791 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-fd7473b6-a721-4577-93f7-bea6228887e6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fd7473b6-a721-4577-93f7-bea6228887e6\") pod \"glance-default-external-api-0\" (UID: \"50f29756-1345-4c4c-a9f5-bb817fcf0015\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/2b1f4bc6b3ef864632668d1177fe0017bb99c34b82e704a930801276839aa0f1/globalmount\"" pod="openstack/glance-default-external-api-0" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.172073 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50f29756-1345-4c4c-a9f5-bb817fcf0015-config-data\") pod \"glance-default-external-api-0\" (UID: \"50f29756-1345-4c4c-a9f5-bb817fcf0015\") " pod="openstack/glance-default-external-api-0" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.179027 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rztjw\" (UniqueName: \"kubernetes.io/projected/50f29756-1345-4c4c-a9f5-bb817fcf0015-kube-api-access-rztjw\") pod \"glance-default-external-api-0\" (UID: \"50f29756-1345-4c4c-a9f5-bb817fcf0015\") " pod="openstack/glance-default-external-api-0" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.180137 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 08 21:42:39 crc kubenswrapper[4791]: W1208 21:42:39.200079 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4a0201ee_a799_4831_bf05_0804b6f413b9.slice/crio-c649a4cfa963a4950b163494bf764d73f66cad58c755fc7ad5c9fcf053e265b9 WatchSource:0}: Error finding container c649a4cfa963a4950b163494bf764d73f66cad58c755fc7ad5c9fcf053e265b9: Status 404 returned error can't find the container with id 
c649a4cfa963a4950b163494bf764d73f66cad58c755fc7ad5c9fcf053e265b9 Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.228031 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-cf78879c9-swt86"] Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.228224 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/291eb1df-ac29-4217-9e9b-2a46b6fabfe2-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"291eb1df-ac29-4217-9e9b-2a46b6fabfe2\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.228376 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/291eb1df-ac29-4217-9e9b-2a46b6fabfe2-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"291eb1df-ac29-4217-9e9b-2a46b6fabfe2\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.228544 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/291eb1df-ac29-4217-9e9b-2a46b6fabfe2-logs\") pod \"glance-default-internal-api-0\" (UID: \"291eb1df-ac29-4217-9e9b-2a46b6fabfe2\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.228704 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/291eb1df-ac29-4217-9e9b-2a46b6fabfe2-config-data\") pod \"glance-default-internal-api-0\" (UID: \"291eb1df-ac29-4217-9e9b-2a46b6fabfe2\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.229147 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-3530eacc-1908-4492-a59f-15d59644c0dc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3530eacc-1908-4492-a59f-15d59644c0dc\") pod \"glance-default-internal-api-0\" (UID: \"291eb1df-ac29-4217-9e9b-2a46b6fabfe2\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.229196 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdxqd\" (UniqueName: \"kubernetes.io/projected/291eb1df-ac29-4217-9e9b-2a46b6fabfe2-kube-api-access-cdxqd\") pod \"glance-default-internal-api-0\" (UID: \"291eb1df-ac29-4217-9e9b-2a46b6fabfe2\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.229305 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/291eb1df-ac29-4217-9e9b-2a46b6fabfe2-scripts\") pod \"glance-default-internal-api-0\" (UID: \"291eb1df-ac29-4217-9e9b-2a46b6fabfe2\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.304648 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-fd7473b6-a721-4577-93f7-bea6228887e6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fd7473b6-a721-4577-93f7-bea6228887e6\") pod \"glance-default-external-api-0\" (UID: \"50f29756-1345-4c4c-a9f5-bb817fcf0015\") " pod="openstack/glance-default-external-api-0" Dec 08 21:42:39 crc 
kubenswrapper[4791]: I1208 21:42:39.332859 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/291eb1df-ac29-4217-9e9b-2a46b6fabfe2-scripts\") pod \"glance-default-internal-api-0\" (UID: \"291eb1df-ac29-4217-9e9b-2a46b6fabfe2\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.332908 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/291eb1df-ac29-4217-9e9b-2a46b6fabfe2-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"291eb1df-ac29-4217-9e9b-2a46b6fabfe2\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.332961 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/291eb1df-ac29-4217-9e9b-2a46b6fabfe2-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"291eb1df-ac29-4217-9e9b-2a46b6fabfe2\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.333015 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/291eb1df-ac29-4217-9e9b-2a46b6fabfe2-logs\") pod \"glance-default-internal-api-0\" (UID: \"291eb1df-ac29-4217-9e9b-2a46b6fabfe2\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.333046 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/291eb1df-ac29-4217-9e9b-2a46b6fabfe2-config-data\") pod \"glance-default-internal-api-0\" (UID: \"291eb1df-ac29-4217-9e9b-2a46b6fabfe2\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.333120 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-3530eacc-1908-4492-a59f-15d59644c0dc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3530eacc-1908-4492-a59f-15d59644c0dc\") pod \"glance-default-internal-api-0\" (UID: \"291eb1df-ac29-4217-9e9b-2a46b6fabfe2\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.333149 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdxqd\" (UniqueName: \"kubernetes.io/projected/291eb1df-ac29-4217-9e9b-2a46b6fabfe2-kube-api-access-cdxqd\") pod \"glance-default-internal-api-0\" (UID: \"291eb1df-ac29-4217-9e9b-2a46b6fabfe2\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.334054 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-7srv6"] Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.334456 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/291eb1df-ac29-4217-9e9b-2a46b6fabfe2-logs\") pod \"glance-default-internal-api-0\" (UID: \"291eb1df-ac29-4217-9e9b-2a46b6fabfe2\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.336277 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/291eb1df-ac29-4217-9e9b-2a46b6fabfe2-httpd-run\") pod \"glance-default-internal-api-0\" (UID: 
\"291eb1df-ac29-4217-9e9b-2a46b6fabfe2\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.340002 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/291eb1df-ac29-4217-9e9b-2a46b6fabfe2-config-data\") pod \"glance-default-internal-api-0\" (UID: \"291eb1df-ac29-4217-9e9b-2a46b6fabfe2\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.340865 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/291eb1df-ac29-4217-9e9b-2a46b6fabfe2-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"291eb1df-ac29-4217-9e9b-2a46b6fabfe2\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.342553 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/291eb1df-ac29-4217-9e9b-2a46b6fabfe2-scripts\") pod \"glance-default-internal-api-0\" (UID: \"291eb1df-ac29-4217-9e9b-2a46b6fabfe2\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.349386 4791 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.349444 4791 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-3530eacc-1908-4492-a59f-15d59644c0dc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3530eacc-1908-4492-a59f-15d59644c0dc\") pod \"glance-default-internal-api-0\" (UID: \"291eb1df-ac29-4217-9e9b-2a46b6fabfe2\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/b6e75dc9bbe8f5543873ba1da9c9ef2677b6e25aaa3d050091b399bf8011b4a0/globalmount\"" pod="openstack/glance-default-internal-api-0" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.353404 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdxqd\" (UniqueName: \"kubernetes.io/projected/291eb1df-ac29-4217-9e9b-2a46b6fabfe2-kube-api-access-cdxqd\") pod \"glance-default-internal-api-0\" (UID: \"291eb1df-ac29-4217-9e9b-2a46b6fabfe2\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.385220 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.427955 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-3530eacc-1908-4492-a59f-15d59644c0dc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3530eacc-1908-4492-a59f-15d59644c0dc\") pod \"glance-default-internal-api-0\" (UID: \"291eb1df-ac29-4217-9e9b-2a46b6fabfe2\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.704195 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.713042 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-xlwz8"] Dec 08 21:42:39 crc kubenswrapper[4791]: W1208 21:42:39.716581 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod74852b39_364a_40e3_8ed8_a24178ee0403.slice/crio-a46ae09220334b3227ac60e278499e4f277585010c3577f7a49646f0f3a3d0d1 WatchSource:0}: Error finding container a46ae09220334b3227ac60e278499e4f277585010c3577f7a49646f0f3a3d0d1: Status 404 returned error can't find the container with id a46ae09220334b3227ac60e278499e4f277585010c3577f7a49646f0f3a3d0d1 Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.750203 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-5gx5k" event={"ID":"5c7f1c00-3243-46d2-96d6-1483681a8906","Type":"ContainerStarted","Data":"28f3ae21f34970db2a35f9c59baa5bce65552ee05ceab28549c32f60c93f9456"} Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.764812 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-xlwz8" event={"ID":"74852b39-364a-40e3-8ed8-a24178ee0403","Type":"ContainerStarted","Data":"a46ae09220334b3227ac60e278499e4f277585010c3577f7a49646f0f3a3d0d1"} Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.774368 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-5gx5k" podStartSLOduration=4.7743513669999995 podStartE2EDuration="4.774351367s" podCreationTimestamp="2025-12-08 21:42:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:42:39.772556602 +0000 UTC m=+1436.471314947" watchObservedRunningTime="2025-12-08 21:42:39.774351367 +0000 UTC m=+1436.473109712" Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.783394 4791 generic.go:334] "Generic (PLEG): container finished" podID="eafc3e7a-7fa4-4151-895c-4181afb453a9" containerID="4310bcebf298370dc24b19577d83101f64cb9f35c8dfe3231f3b5af2877ca885" exitCode=0 Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.783473 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b868669f-rn55l" event={"ID":"eafc3e7a-7fa4-4151-895c-4181afb453a9","Type":"ContainerDied","Data":"4310bcebf298370dc24b19577d83101f64cb9f35c8dfe3231f3b5af2877ca885"} Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.804429 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cf78879c9-swt86" event={"ID":"4a0201ee-a799-4831-bf05-0804b6f413b9","Type":"ContainerStarted","Data":"c649a4cfa963a4950b163494bf764d73f66cad58c755fc7ad5c9fcf053e265b9"} Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.867352 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-779pn" event={"ID":"bd2c18de-3f41-4c59-b400-d96f39d28ec2","Type":"ContainerStarted","Data":"fb861b6e2cc7f2c3e139c059bc20dfd39ee6ec7bfb8586207b3f23753a0d3e4e"} Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.878727 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-7srv6" event={"ID":"d9f87323-3041-444f-b26d-c76871bd426f","Type":"ContainerStarted","Data":"ecc83e26f6e43fbdf35da3007c13777e6c70822e74909d6f70d2099499b42196"} Dec 08 21:42:39 crc kubenswrapper[4791]: I1208 21:42:39.929225 4791 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-779pn" podStartSLOduration=3.929205574 podStartE2EDuration="3.929205574s" podCreationTimestamp="2025-12-08 21:42:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:42:39.927250406 +0000 UTC m=+1436.626008761" watchObservedRunningTime="2025-12-08 21:42:39.929205574 +0000 UTC m=+1436.627963919" Dec 08 21:42:40 crc kubenswrapper[4791]: I1208 21:42:40.185680 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 08 21:42:40 crc kubenswrapper[4791]: I1208 21:42:40.620995 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 08 21:42:40 crc kubenswrapper[4791]: I1208 21:42:40.629764 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b868669f-rn55l" Dec 08 21:42:40 crc kubenswrapper[4791]: W1208 21:42:40.711040 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod291eb1df_ac29_4217_9e9b_2a46b6fabfe2.slice/crio-216e830f770e5e68431075d9dceedf30b12b825051cc9769ca33ff596eaf8829 WatchSource:0}: Error finding container 216e830f770e5e68431075d9dceedf30b12b825051cc9769ca33ff596eaf8829: Status 404 returned error can't find the container with id 216e830f770e5e68431075d9dceedf30b12b825051cc9769ca33ff596eaf8829 Dec 08 21:42:40 crc kubenswrapper[4791]: I1208 21:42:40.775782 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/eafc3e7a-7fa4-4151-895c-4181afb453a9-ovsdbserver-sb\") pod \"eafc3e7a-7fa4-4151-895c-4181afb453a9\" (UID: \"eafc3e7a-7fa4-4151-895c-4181afb453a9\") " Dec 08 21:42:40 crc kubenswrapper[4791]: I1208 21:42:40.776668 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eafc3e7a-7fa4-4151-895c-4181afb453a9-dns-svc\") pod \"eafc3e7a-7fa4-4151-895c-4181afb453a9\" (UID: \"eafc3e7a-7fa4-4151-895c-4181afb453a9\") " Dec 08 21:42:40 crc kubenswrapper[4791]: I1208 21:42:40.776728 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-56dvp\" (UniqueName: \"kubernetes.io/projected/eafc3e7a-7fa4-4151-895c-4181afb453a9-kube-api-access-56dvp\") pod \"eafc3e7a-7fa4-4151-895c-4181afb453a9\" (UID: \"eafc3e7a-7fa4-4151-895c-4181afb453a9\") " Dec 08 21:42:40 crc kubenswrapper[4791]: I1208 21:42:40.776764 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eafc3e7a-7fa4-4151-895c-4181afb453a9-config\") pod \"eafc3e7a-7fa4-4151-895c-4181afb453a9\" (UID: \"eafc3e7a-7fa4-4151-895c-4181afb453a9\") " Dec 08 21:42:40 crc kubenswrapper[4791]: I1208 21:42:40.776838 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/eafc3e7a-7fa4-4151-895c-4181afb453a9-dns-swift-storage-0\") pod \"eafc3e7a-7fa4-4151-895c-4181afb453a9\" (UID: \"eafc3e7a-7fa4-4151-895c-4181afb453a9\") " Dec 08 21:42:40 crc kubenswrapper[4791]: I1208 21:42:40.777142 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/eafc3e7a-7fa4-4151-895c-4181afb453a9-ovsdbserver-nb\") pod \"eafc3e7a-7fa4-4151-895c-4181afb453a9\" (UID: \"eafc3e7a-7fa4-4151-895c-4181afb453a9\") " Dec 08 21:42:40 crc kubenswrapper[4791]: I1208 21:42:40.811114 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eafc3e7a-7fa4-4151-895c-4181afb453a9-kube-api-access-56dvp" (OuterVolumeSpecName: "kube-api-access-56dvp") pod "eafc3e7a-7fa4-4151-895c-4181afb453a9" (UID: "eafc3e7a-7fa4-4151-895c-4181afb453a9"). InnerVolumeSpecName "kube-api-access-56dvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:42:40 crc kubenswrapper[4791]: I1208 21:42:40.815117 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eafc3e7a-7fa4-4151-895c-4181afb453a9-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "eafc3e7a-7fa4-4151-895c-4181afb453a9" (UID: "eafc3e7a-7fa4-4151-895c-4181afb453a9"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:42:40 crc kubenswrapper[4791]: I1208 21:42:40.820695 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eafc3e7a-7fa4-4151-895c-4181afb453a9-config" (OuterVolumeSpecName: "config") pod "eafc3e7a-7fa4-4151-895c-4181afb453a9" (UID: "eafc3e7a-7fa4-4151-895c-4181afb453a9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:42:40 crc kubenswrapper[4791]: I1208 21:42:40.835549 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eafc3e7a-7fa4-4151-895c-4181afb453a9-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "eafc3e7a-7fa4-4151-895c-4181afb453a9" (UID: "eafc3e7a-7fa4-4151-895c-4181afb453a9"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:42:40 crc kubenswrapper[4791]: E1208 21:42:40.853050 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/eafc3e7a-7fa4-4151-895c-4181afb453a9-dns-svc podName:eafc3e7a-7fa4-4151-895c-4181afb453a9 nodeName:}" failed. No retries permitted until 2025-12-08 21:42:41.353022916 +0000 UTC m=+1438.051781261 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "dns-svc" (UniqueName: "kubernetes.io/configmap/eafc3e7a-7fa4-4151-895c-4181afb453a9-dns-svc") pod "eafc3e7a-7fa4-4151-895c-4181afb453a9" (UID: "eafc3e7a-7fa4-4151-895c-4181afb453a9") : error deleting /var/lib/kubelet/pods/eafc3e7a-7fa4-4151-895c-4181afb453a9/volume-subpaths: remove /var/lib/kubelet/pods/eafc3e7a-7fa4-4151-895c-4181afb453a9/volume-subpaths: no such file or directory Dec 08 21:42:40 crc kubenswrapper[4791]: I1208 21:42:40.853498 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eafc3e7a-7fa4-4151-895c-4181afb453a9-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "eafc3e7a-7fa4-4151-895c-4181afb453a9" (UID: "eafc3e7a-7fa4-4151-895c-4181afb453a9"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:42:40 crc kubenswrapper[4791]: I1208 21:42:40.882641 4791 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/eafc3e7a-7fa4-4151-895c-4181afb453a9-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:40 crc kubenswrapper[4791]: I1208 21:42:40.882684 4791 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/eafc3e7a-7fa4-4151-895c-4181afb453a9-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:40 crc kubenswrapper[4791]: I1208 21:42:40.882694 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-56dvp\" (UniqueName: \"kubernetes.io/projected/eafc3e7a-7fa4-4151-895c-4181afb453a9-kube-api-access-56dvp\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:40 crc kubenswrapper[4791]: I1208 21:42:40.882718 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eafc3e7a-7fa4-4151-895c-4181afb453a9-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:40 crc kubenswrapper[4791]: I1208 21:42:40.882728 4791 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/eafc3e7a-7fa4-4151-895c-4181afb453a9-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:40 crc kubenswrapper[4791]: I1208 21:42:40.893824 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"291eb1df-ac29-4217-9e9b-2a46b6fabfe2","Type":"ContainerStarted","Data":"216e830f770e5e68431075d9dceedf30b12b825051cc9769ca33ff596eaf8829"} Dec 08 21:42:40 crc kubenswrapper[4791]: I1208 21:42:40.896863 4791 generic.go:334] "Generic (PLEG): container finished" podID="74852b39-364a-40e3-8ed8-a24178ee0403" containerID="5574f52dc2ccd067a086cb88440dcf6edf8a3e09c924dabad0698cfd82165c6a" exitCode=0 Dec 08 21:42:40 crc kubenswrapper[4791]: I1208 21:42:40.896930 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-xlwz8" event={"ID":"74852b39-364a-40e3-8ed8-a24178ee0403","Type":"ContainerDied","Data":"5574f52dc2ccd067a086cb88440dcf6edf8a3e09c924dabad0698cfd82165c6a"} Dec 08 21:42:40 crc kubenswrapper[4791]: I1208 21:42:40.915945 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5b868669f-rn55l" Dec 08 21:42:40 crc kubenswrapper[4791]: I1208 21:42:40.915956 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b868669f-rn55l" event={"ID":"eafc3e7a-7fa4-4151-895c-4181afb453a9","Type":"ContainerDied","Data":"546db1a784a65abe3c7f38bda833fb040f1bd84cd83ce95673a2651b5074c1cf"} Dec 08 21:42:40 crc kubenswrapper[4791]: I1208 21:42:40.916512 4791 scope.go:117] "RemoveContainer" containerID="4310bcebf298370dc24b19577d83101f64cb9f35c8dfe3231f3b5af2877ca885" Dec 08 21:42:40 crc kubenswrapper[4791]: I1208 21:42:40.935405 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"50f29756-1345-4c4c-a9f5-bb817fcf0015","Type":"ContainerStarted","Data":"04ab95c4d64a883785f2300dfcdf40ccffdb1718f75a2b404eec50f98fd49abd"} Dec 08 21:42:40 crc kubenswrapper[4791]: I1208 21:42:40.939511 4791 generic.go:334] "Generic (PLEG): container finished" podID="4a0201ee-a799-4831-bf05-0804b6f413b9" containerID="282942b5e443398d4309cb48c50e98e6ca5650185fccc7ca55caafefd6294583" exitCode=0 Dec 08 21:42:40 crc kubenswrapper[4791]: I1208 21:42:40.941443 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cf78879c9-swt86" event={"ID":"4a0201ee-a799-4831-bf05-0804b6f413b9","Type":"ContainerDied","Data":"282942b5e443398d4309cb48c50e98e6ca5650185fccc7ca55caafefd6294583"} Dec 08 21:42:41 crc kubenswrapper[4791]: I1208 21:42:41.407593 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eafc3e7a-7fa4-4151-895c-4181afb453a9-dns-svc\") pod \"eafc3e7a-7fa4-4151-895c-4181afb453a9\" (UID: \"eafc3e7a-7fa4-4151-895c-4181afb453a9\") " Dec 08 21:42:41 crc kubenswrapper[4791]: I1208 21:42:41.409065 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eafc3e7a-7fa4-4151-895c-4181afb453a9-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "eafc3e7a-7fa4-4151-895c-4181afb453a9" (UID: "eafc3e7a-7fa4-4151-895c-4181afb453a9"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:42:41 crc kubenswrapper[4791]: I1208 21:42:41.505343 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-cf78879c9-swt86" Dec 08 21:42:41 crc kubenswrapper[4791]: I1208 21:42:41.510111 4791 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eafc3e7a-7fa4-4151-895c-4181afb453a9-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:41 crc kubenswrapper[4791]: I1208 21:42:41.612194 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4a0201ee-a799-4831-bf05-0804b6f413b9-dns-svc\") pod \"4a0201ee-a799-4831-bf05-0804b6f413b9\" (UID: \"4a0201ee-a799-4831-bf05-0804b6f413b9\") " Dec 08 21:42:41 crc kubenswrapper[4791]: I1208 21:42:41.612339 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a0201ee-a799-4831-bf05-0804b6f413b9-config\") pod \"4a0201ee-a799-4831-bf05-0804b6f413b9\" (UID: \"4a0201ee-a799-4831-bf05-0804b6f413b9\") " Dec 08 21:42:41 crc kubenswrapper[4791]: I1208 21:42:41.612390 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5tgz2\" (UniqueName: \"kubernetes.io/projected/4a0201ee-a799-4831-bf05-0804b6f413b9-kube-api-access-5tgz2\") pod \"4a0201ee-a799-4831-bf05-0804b6f413b9\" (UID: \"4a0201ee-a799-4831-bf05-0804b6f413b9\") " Dec 08 21:42:41 crc kubenswrapper[4791]: I1208 21:42:41.612446 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4a0201ee-a799-4831-bf05-0804b6f413b9-ovsdbserver-sb\") pod \"4a0201ee-a799-4831-bf05-0804b6f413b9\" (UID: \"4a0201ee-a799-4831-bf05-0804b6f413b9\") " Dec 08 21:42:41 crc kubenswrapper[4791]: I1208 21:42:41.612565 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4a0201ee-a799-4831-bf05-0804b6f413b9-ovsdbserver-nb\") pod \"4a0201ee-a799-4831-bf05-0804b6f413b9\" (UID: \"4a0201ee-a799-4831-bf05-0804b6f413b9\") " Dec 08 21:42:41 crc kubenswrapper[4791]: I1208 21:42:41.612590 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4a0201ee-a799-4831-bf05-0804b6f413b9-dns-swift-storage-0\") pod \"4a0201ee-a799-4831-bf05-0804b6f413b9\" (UID: \"4a0201ee-a799-4831-bf05-0804b6f413b9\") " Dec 08 21:42:41 crc kubenswrapper[4791]: I1208 21:42:41.617015 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a0201ee-a799-4831-bf05-0804b6f413b9-kube-api-access-5tgz2" (OuterVolumeSpecName: "kube-api-access-5tgz2") pod "4a0201ee-a799-4831-bf05-0804b6f413b9" (UID: "4a0201ee-a799-4831-bf05-0804b6f413b9"). InnerVolumeSpecName "kube-api-access-5tgz2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:42:41 crc kubenswrapper[4791]: I1208 21:42:41.646922 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a0201ee-a799-4831-bf05-0804b6f413b9-config" (OuterVolumeSpecName: "config") pod "4a0201ee-a799-4831-bf05-0804b6f413b9" (UID: "4a0201ee-a799-4831-bf05-0804b6f413b9"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:42:41 crc kubenswrapper[4791]: I1208 21:42:41.660258 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a0201ee-a799-4831-bf05-0804b6f413b9-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "4a0201ee-a799-4831-bf05-0804b6f413b9" (UID: "4a0201ee-a799-4831-bf05-0804b6f413b9"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:42:41 crc kubenswrapper[4791]: I1208 21:42:41.669161 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a0201ee-a799-4831-bf05-0804b6f413b9-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "4a0201ee-a799-4831-bf05-0804b6f413b9" (UID: "4a0201ee-a799-4831-bf05-0804b6f413b9"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:42:41 crc kubenswrapper[4791]: I1208 21:42:41.686384 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b868669f-rn55l"] Dec 08 21:42:41 crc kubenswrapper[4791]: I1208 21:42:41.686419 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5b868669f-rn55l"] Dec 08 21:42:41 crc kubenswrapper[4791]: I1208 21:42:41.686820 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a0201ee-a799-4831-bf05-0804b6f413b9-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "4a0201ee-a799-4831-bf05-0804b6f413b9" (UID: "4a0201ee-a799-4831-bf05-0804b6f413b9"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:42:41 crc kubenswrapper[4791]: I1208 21:42:41.688961 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a0201ee-a799-4831-bf05-0804b6f413b9-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "4a0201ee-a799-4831-bf05-0804b6f413b9" (UID: "4a0201ee-a799-4831-bf05-0804b6f413b9"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:42:41 crc kubenswrapper[4791]: I1208 21:42:41.718009 4791 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4a0201ee-a799-4831-bf05-0804b6f413b9-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:41 crc kubenswrapper[4791]: I1208 21:42:41.718047 4791 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4a0201ee-a799-4831-bf05-0804b6f413b9-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:41 crc kubenswrapper[4791]: I1208 21:42:41.718060 4791 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4a0201ee-a799-4831-bf05-0804b6f413b9-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:41 crc kubenswrapper[4791]: I1208 21:42:41.718075 4791 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4a0201ee-a799-4831-bf05-0804b6f413b9-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:41 crc kubenswrapper[4791]: I1208 21:42:41.718089 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a0201ee-a799-4831-bf05-0804b6f413b9-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:41 crc kubenswrapper[4791]: I1208 21:42:41.718112 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5tgz2\" (UniqueName: \"kubernetes.io/projected/4a0201ee-a799-4831-bf05-0804b6f413b9-kube-api-access-5tgz2\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:41 crc kubenswrapper[4791]: I1208 21:42:41.966743 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-xlwz8" event={"ID":"74852b39-364a-40e3-8ed8-a24178ee0403","Type":"ContainerStarted","Data":"c8ac760dcd8e06f76c056d88cb5383c05c088e0c91a71a268c06064ee5cc600b"} Dec 08 21:42:41 crc kubenswrapper[4791]: I1208 21:42:41.968236 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-56df8fb6b7-xlwz8" Dec 08 21:42:41 crc kubenswrapper[4791]: I1208 21:42:41.995460 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"50f29756-1345-4c4c-a9f5-bb817fcf0015","Type":"ContainerStarted","Data":"5b51599cc33a3b8ffc146567886d28e282429698416af83a86f193436bb8c9fc"} Dec 08 21:42:42 crc kubenswrapper[4791]: I1208 21:42:42.000692 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cf78879c9-swt86" event={"ID":"4a0201ee-a799-4831-bf05-0804b6f413b9","Type":"ContainerDied","Data":"c649a4cfa963a4950b163494bf764d73f66cad58c755fc7ad5c9fcf053e265b9"} Dec 08 21:42:42 crc kubenswrapper[4791]: I1208 21:42:42.000759 4791 scope.go:117] "RemoveContainer" containerID="282942b5e443398d4309cb48c50e98e6ca5650185fccc7ca55caafefd6294583" Dec 08 21:42:42 crc kubenswrapper[4791]: I1208 21:42:42.001146 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-cf78879c9-swt86" Dec 08 21:42:42 crc kubenswrapper[4791]: I1208 21:42:42.003905 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-56df8fb6b7-xlwz8" podStartSLOduration=4.003888211 podStartE2EDuration="4.003888211s" podCreationTimestamp="2025-12-08 21:42:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:42:41.994252234 +0000 UTC m=+1438.693010579" watchObservedRunningTime="2025-12-08 21:42:42.003888211 +0000 UTC m=+1438.702646556" Dec 08 21:42:42 crc kubenswrapper[4791]: I1208 21:42:42.338920 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-cf78879c9-swt86"] Dec 08 21:42:42 crc kubenswrapper[4791]: I1208 21:42:42.373977 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-cf78879c9-swt86"] Dec 08 21:42:43 crc kubenswrapper[4791]: I1208 21:42:43.107750 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"50f29756-1345-4c4c-a9f5-bb817fcf0015","Type":"ContainerStarted","Data":"6c4917b79e37fb87f6cb73ac752adc8eb89e4b0e2a75cccd4d48893b01c44fe5"} Dec 08 21:42:43 crc kubenswrapper[4791]: I1208 21:42:43.147643 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"291eb1df-ac29-4217-9e9b-2a46b6fabfe2","Type":"ContainerStarted","Data":"f355ff51af3e60569527fccdacbea63b4b9b26033b20878391bfac6ddacf84ad"} Dec 08 21:42:43 crc kubenswrapper[4791]: I1208 21:42:43.147698 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"291eb1df-ac29-4217-9e9b-2a46b6fabfe2","Type":"ContainerStarted","Data":"30f52b9f0202b8dc81727d5274491ad9f7c4275000853d29e7f2b3758fd29332"} Dec 08 21:42:43 crc kubenswrapper[4791]: I1208 21:42:43.164008 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=5.163986511 podStartE2EDuration="5.163986511s" podCreationTimestamp="2025-12-08 21:42:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:42:43.155265557 +0000 UTC m=+1439.854024032" watchObservedRunningTime="2025-12-08 21:42:43.163986511 +0000 UTC m=+1439.862744856" Dec 08 21:42:43 crc kubenswrapper[4791]: I1208 21:42:43.188868 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.188850622 podStartE2EDuration="4.188850622s" podCreationTimestamp="2025-12-08 21:42:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:42:43.181135603 +0000 UTC m=+1439.879893948" watchObservedRunningTime="2025-12-08 21:42:43.188850622 +0000 UTC m=+1439.887608967" Dec 08 21:42:43 crc kubenswrapper[4791]: I1208 21:42:43.471375 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-pmmmw" Dec 08 21:42:43 crc kubenswrapper[4791]: I1208 21:42:43.555534 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-pmmmw" Dec 08 21:42:43 crc kubenswrapper[4791]: I1208 21:42:43.627129 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="4a0201ee-a799-4831-bf05-0804b6f413b9" path="/var/lib/kubelet/pods/4a0201ee-a799-4831-bf05-0804b6f413b9/volumes" Dec 08 21:42:43 crc kubenswrapper[4791]: I1208 21:42:43.627938 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eafc3e7a-7fa4-4151-895c-4181afb453a9" path="/var/lib/kubelet/pods/eafc3e7a-7fa4-4151-895c-4181afb453a9/volumes" Dec 08 21:42:43 crc kubenswrapper[4791]: I1208 21:42:43.723085 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pmmmw"] Dec 08 21:42:45 crc kubenswrapper[4791]: I1208 21:42:45.173998 4791 generic.go:334] "Generic (PLEG): container finished" podID="5c7f1c00-3243-46d2-96d6-1483681a8906" containerID="28f3ae21f34970db2a35f9c59baa5bce65552ee05ceab28549c32f60c93f9456" exitCode=0 Dec 08 21:42:45 crc kubenswrapper[4791]: I1208 21:42:45.174127 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-5gx5k" event={"ID":"5c7f1c00-3243-46d2-96d6-1483681a8906","Type":"ContainerDied","Data":"28f3ae21f34970db2a35f9c59baa5bce65552ee05ceab28549c32f60c93f9456"} Dec 08 21:42:45 crc kubenswrapper[4791]: I1208 21:42:45.174431 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-pmmmw" podUID="d9780822-bcb2-4576-a005-16807709c69b" containerName="registry-server" containerID="cri-o://f555e697b7b0dd29924ddffde7d840c1307d111f66d2ae27b1b1cbef89c75870" gracePeriod=2 Dec 08 21:42:46 crc kubenswrapper[4791]: I1208 21:42:46.204921 4791 generic.go:334] "Generic (PLEG): container finished" podID="d9780822-bcb2-4576-a005-16807709c69b" containerID="f555e697b7b0dd29924ddffde7d840c1307d111f66d2ae27b1b1cbef89c75870" exitCode=0 Dec 08 21:42:46 crc kubenswrapper[4791]: I1208 21:42:46.205001 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pmmmw" event={"ID":"d9780822-bcb2-4576-a005-16807709c69b","Type":"ContainerDied","Data":"f555e697b7b0dd29924ddffde7d840c1307d111f66d2ae27b1b1cbef89c75870"} Dec 08 21:42:46 crc kubenswrapper[4791]: I1208 21:42:46.221336 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 08 21:42:46 crc kubenswrapper[4791]: I1208 21:42:46.221567 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="50f29756-1345-4c4c-a9f5-bb817fcf0015" containerName="glance-log" containerID="cri-o://5b51599cc33a3b8ffc146567886d28e282429698416af83a86f193436bb8c9fc" gracePeriod=30 Dec 08 21:42:46 crc kubenswrapper[4791]: I1208 21:42:46.221844 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="50f29756-1345-4c4c-a9f5-bb817fcf0015" containerName="glance-httpd" containerID="cri-o://6c4917b79e37fb87f6cb73ac752adc8eb89e4b0e2a75cccd4d48893b01c44fe5" gracePeriod=30 Dec 08 21:42:46 crc kubenswrapper[4791]: I1208 21:42:46.290644 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 08 21:42:46 crc kubenswrapper[4791]: I1208 21:42:46.291191 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="291eb1df-ac29-4217-9e9b-2a46b6fabfe2" containerName="glance-log" containerID="cri-o://30f52b9f0202b8dc81727d5274491ad9f7c4275000853d29e7f2b3758fd29332" gracePeriod=30 Dec 08 21:42:46 crc kubenswrapper[4791]: I1208 21:42:46.291325 4791 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="291eb1df-ac29-4217-9e9b-2a46b6fabfe2" containerName="glance-httpd" containerID="cri-o://f355ff51af3e60569527fccdacbea63b4b9b26033b20878391bfac6ddacf84ad" gracePeriod=30 Dec 08 21:42:47 crc kubenswrapper[4791]: I1208 21:42:47.223246 4791 generic.go:334] "Generic (PLEG): container finished" podID="291eb1df-ac29-4217-9e9b-2a46b6fabfe2" containerID="f355ff51af3e60569527fccdacbea63b4b9b26033b20878391bfac6ddacf84ad" exitCode=0 Dec 08 21:42:47 crc kubenswrapper[4791]: I1208 21:42:47.223623 4791 generic.go:334] "Generic (PLEG): container finished" podID="291eb1df-ac29-4217-9e9b-2a46b6fabfe2" containerID="30f52b9f0202b8dc81727d5274491ad9f7c4275000853d29e7f2b3758fd29332" exitCode=143 Dec 08 21:42:47 crc kubenswrapper[4791]: I1208 21:42:47.223440 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"291eb1df-ac29-4217-9e9b-2a46b6fabfe2","Type":"ContainerDied","Data":"f355ff51af3e60569527fccdacbea63b4b9b26033b20878391bfac6ddacf84ad"} Dec 08 21:42:47 crc kubenswrapper[4791]: I1208 21:42:47.223945 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"291eb1df-ac29-4217-9e9b-2a46b6fabfe2","Type":"ContainerDied","Data":"30f52b9f0202b8dc81727d5274491ad9f7c4275000853d29e7f2b3758fd29332"} Dec 08 21:42:47 crc kubenswrapper[4791]: I1208 21:42:47.227799 4791 generic.go:334] "Generic (PLEG): container finished" podID="50f29756-1345-4c4c-a9f5-bb817fcf0015" containerID="6c4917b79e37fb87f6cb73ac752adc8eb89e4b0e2a75cccd4d48893b01c44fe5" exitCode=0 Dec 08 21:42:47 crc kubenswrapper[4791]: I1208 21:42:47.227820 4791 generic.go:334] "Generic (PLEG): container finished" podID="50f29756-1345-4c4c-a9f5-bb817fcf0015" containerID="5b51599cc33a3b8ffc146567886d28e282429698416af83a86f193436bb8c9fc" exitCode=143 Dec 08 21:42:47 crc kubenswrapper[4791]: I1208 21:42:47.227838 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"50f29756-1345-4c4c-a9f5-bb817fcf0015","Type":"ContainerDied","Data":"6c4917b79e37fb87f6cb73ac752adc8eb89e4b0e2a75cccd4d48893b01c44fe5"} Dec 08 21:42:47 crc kubenswrapper[4791]: I1208 21:42:47.227858 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"50f29756-1345-4c4c-a9f5-bb817fcf0015","Type":"ContainerDied","Data":"5b51599cc33a3b8ffc146567886d28e282429698416af83a86f193436bb8c9fc"} Dec 08 21:42:48 crc kubenswrapper[4791]: I1208 21:42:48.885539 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-56df8fb6b7-xlwz8" Dec 08 21:42:48 crc kubenswrapper[4791]: I1208 21:42:48.947841 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c79d794d7-m7h8z"] Dec 08 21:42:48 crc kubenswrapper[4791]: I1208 21:42:48.948140 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5c79d794d7-m7h8z" podUID="aaf43b8d-8ee8-4def-b8c3-8f5596513b2f" containerName="dnsmasq-dns" containerID="cri-o://6ae75eb34657f585e1939faacaa48865a1916b8e299a99eefbff873707b6af10" gracePeriod=10 Dec 08 21:42:49 crc kubenswrapper[4791]: I1208 21:42:49.307905 4791 generic.go:334] "Generic (PLEG): container finished" podID="aaf43b8d-8ee8-4def-b8c3-8f5596513b2f" containerID="6ae75eb34657f585e1939faacaa48865a1916b8e299a99eefbff873707b6af10" exitCode=0 Dec 08 21:42:49 crc 
kubenswrapper[4791]: I1208 21:42:49.307963 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c79d794d7-m7h8z" event={"ID":"aaf43b8d-8ee8-4def-b8c3-8f5596513b2f","Type":"ContainerDied","Data":"6ae75eb34657f585e1939faacaa48865a1916b8e299a99eefbff873707b6af10"} Dec 08 21:42:50 crc kubenswrapper[4791]: I1208 21:42:50.789570 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5c79d794d7-m7h8z" podUID="aaf43b8d-8ee8-4def-b8c3-8f5596513b2f" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.154:5353: connect: connection refused" Dec 08 21:42:53 crc kubenswrapper[4791]: E1208 21:42:53.395867 4791 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f555e697b7b0dd29924ddffde7d840c1307d111f66d2ae27b1b1cbef89c75870 is running failed: container process not found" containerID="f555e697b7b0dd29924ddffde7d840c1307d111f66d2ae27b1b1cbef89c75870" cmd=["grpc_health_probe","-addr=:50051"] Dec 08 21:42:53 crc kubenswrapper[4791]: E1208 21:42:53.396435 4791 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f555e697b7b0dd29924ddffde7d840c1307d111f66d2ae27b1b1cbef89c75870 is running failed: container process not found" containerID="f555e697b7b0dd29924ddffde7d840c1307d111f66d2ae27b1b1cbef89c75870" cmd=["grpc_health_probe","-addr=:50051"] Dec 08 21:42:53 crc kubenswrapper[4791]: E1208 21:42:53.396647 4791 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f555e697b7b0dd29924ddffde7d840c1307d111f66d2ae27b1b1cbef89c75870 is running failed: container process not found" containerID="f555e697b7b0dd29924ddffde7d840c1307d111f66d2ae27b1b1cbef89c75870" cmd=["grpc_health_probe","-addr=:50051"] Dec 08 21:42:53 crc kubenswrapper[4791]: E1208 21:42:53.396682 4791 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f555e697b7b0dd29924ddffde7d840c1307d111f66d2ae27b1b1cbef89c75870 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/community-operators-pmmmw" podUID="d9780822-bcb2-4576-a005-16807709c69b" containerName="registry-server" Dec 08 21:42:55 crc kubenswrapper[4791]: I1208 21:42:55.790240 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5c79d794d7-m7h8z" podUID="aaf43b8d-8ee8-4def-b8c3-8f5596513b2f" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.154:5353: connect: connection refused" Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.663041 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-5gx5k" Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.679516 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pmmmw" Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.685875 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.783921 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rztjw\" (UniqueName: \"kubernetes.io/projected/50f29756-1345-4c4c-a9f5-bb817fcf0015-kube-api-access-rztjw\") pod \"50f29756-1345-4c4c-a9f5-bb817fcf0015\" (UID: \"50f29756-1345-4c4c-a9f5-bb817fcf0015\") " Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.784025 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50f29756-1345-4c4c-a9f5-bb817fcf0015-config-data\") pod \"50f29756-1345-4c4c-a9f5-bb817fcf0015\" (UID: \"50f29756-1345-4c4c-a9f5-bb817fcf0015\") " Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.784232 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fd7473b6-a721-4577-93f7-bea6228887e6\") pod \"50f29756-1345-4c4c-a9f5-bb817fcf0015\" (UID: \"50f29756-1345-4c4c-a9f5-bb817fcf0015\") " Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.784261 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5c7f1c00-3243-46d2-96d6-1483681a8906-credential-keys\") pod \"5c7f1c00-3243-46d2-96d6-1483681a8906\" (UID: \"5c7f1c00-3243-46d2-96d6-1483681a8906\") " Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.784289 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50f29756-1345-4c4c-a9f5-bb817fcf0015-logs\") pod \"50f29756-1345-4c4c-a9f5-bb817fcf0015\" (UID: \"50f29756-1345-4c4c-a9f5-bb817fcf0015\") " Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.784311 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c7f1c00-3243-46d2-96d6-1483681a8906-config-data\") pod \"5c7f1c00-3243-46d2-96d6-1483681a8906\" (UID: \"5c7f1c00-3243-46d2-96d6-1483681a8906\") " Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.784330 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c7f1c00-3243-46d2-96d6-1483681a8906-combined-ca-bundle\") pod \"5c7f1c00-3243-46d2-96d6-1483681a8906\" (UID: \"5c7f1c00-3243-46d2-96d6-1483681a8906\") " Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.784373 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50f29756-1345-4c4c-a9f5-bb817fcf0015-combined-ca-bundle\") pod \"50f29756-1345-4c4c-a9f5-bb817fcf0015\" (UID: \"50f29756-1345-4c4c-a9f5-bb817fcf0015\") " Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.784389 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/50f29756-1345-4c4c-a9f5-bb817fcf0015-httpd-run\") pod \"50f29756-1345-4c4c-a9f5-bb817fcf0015\" (UID: \"50f29756-1345-4c4c-a9f5-bb817fcf0015\") " Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.784455 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5bxks\" (UniqueName: \"kubernetes.io/projected/5c7f1c00-3243-46d2-96d6-1483681a8906-kube-api-access-5bxks\") pod 
\"5c7f1c00-3243-46d2-96d6-1483681a8906\" (UID: \"5c7f1c00-3243-46d2-96d6-1483681a8906\") " Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.784487 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9780822-bcb2-4576-a005-16807709c69b-catalog-content\") pod \"d9780822-bcb2-4576-a005-16807709c69b\" (UID: \"d9780822-bcb2-4576-a005-16807709c69b\") " Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.784551 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5c7f1c00-3243-46d2-96d6-1483681a8906-fernet-keys\") pod \"5c7f1c00-3243-46d2-96d6-1483681a8906\" (UID: \"5c7f1c00-3243-46d2-96d6-1483681a8906\") " Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.784578 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c7f1c00-3243-46d2-96d6-1483681a8906-scripts\") pod \"5c7f1c00-3243-46d2-96d6-1483681a8906\" (UID: \"5c7f1c00-3243-46d2-96d6-1483681a8906\") " Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.784630 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-59th7\" (UniqueName: \"kubernetes.io/projected/d9780822-bcb2-4576-a005-16807709c69b-kube-api-access-59th7\") pod \"d9780822-bcb2-4576-a005-16807709c69b\" (UID: \"d9780822-bcb2-4576-a005-16807709c69b\") " Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.784720 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50f29756-1345-4c4c-a9f5-bb817fcf0015-scripts\") pod \"50f29756-1345-4c4c-a9f5-bb817fcf0015\" (UID: \"50f29756-1345-4c4c-a9f5-bb817fcf0015\") " Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.784749 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9780822-bcb2-4576-a005-16807709c69b-utilities\") pod \"d9780822-bcb2-4576-a005-16807709c69b\" (UID: \"d9780822-bcb2-4576-a005-16807709c69b\") " Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.787029 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d9780822-bcb2-4576-a005-16807709c69b-utilities" (OuterVolumeSpecName: "utilities") pod "d9780822-bcb2-4576-a005-16807709c69b" (UID: "d9780822-bcb2-4576-a005-16807709c69b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.787348 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/50f29756-1345-4c4c-a9f5-bb817fcf0015-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "50f29756-1345-4c4c-a9f5-bb817fcf0015" (UID: "50f29756-1345-4c4c-a9f5-bb817fcf0015"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.788499 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/50f29756-1345-4c4c-a9f5-bb817fcf0015-logs" (OuterVolumeSpecName: "logs") pod "50f29756-1345-4c4c-a9f5-bb817fcf0015" (UID: "50f29756-1345-4c4c-a9f5-bb817fcf0015"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.794471 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c7f1c00-3243-46d2-96d6-1483681a8906-kube-api-access-5bxks" (OuterVolumeSpecName: "kube-api-access-5bxks") pod "5c7f1c00-3243-46d2-96d6-1483681a8906" (UID: "5c7f1c00-3243-46d2-96d6-1483681a8906"). InnerVolumeSpecName "kube-api-access-5bxks". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.796329 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c7f1c00-3243-46d2-96d6-1483681a8906-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "5c7f1c00-3243-46d2-96d6-1483681a8906" (UID: "5c7f1c00-3243-46d2-96d6-1483681a8906"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.797360 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9780822-bcb2-4576-a005-16807709c69b-kube-api-access-59th7" (OuterVolumeSpecName: "kube-api-access-59th7") pod "d9780822-bcb2-4576-a005-16807709c69b" (UID: "d9780822-bcb2-4576-a005-16807709c69b"). InnerVolumeSpecName "kube-api-access-59th7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.801370 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50f29756-1345-4c4c-a9f5-bb817fcf0015-kube-api-access-rztjw" (OuterVolumeSpecName: "kube-api-access-rztjw") pod "50f29756-1345-4c4c-a9f5-bb817fcf0015" (UID: "50f29756-1345-4c4c-a9f5-bb817fcf0015"). InnerVolumeSpecName "kube-api-access-rztjw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.802298 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50f29756-1345-4c4c-a9f5-bb817fcf0015-scripts" (OuterVolumeSpecName: "scripts") pod "50f29756-1345-4c4c-a9f5-bb817fcf0015" (UID: "50f29756-1345-4c4c-a9f5-bb817fcf0015"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.806944 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c7f1c00-3243-46d2-96d6-1483681a8906-scripts" (OuterVolumeSpecName: "scripts") pod "5c7f1c00-3243-46d2-96d6-1483681a8906" (UID: "5c7f1c00-3243-46d2-96d6-1483681a8906"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.820473 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fd7473b6-a721-4577-93f7-bea6228887e6" (OuterVolumeSpecName: "glance") pod "50f29756-1345-4c4c-a9f5-bb817fcf0015" (UID: "50f29756-1345-4c4c-a9f5-bb817fcf0015"). InnerVolumeSpecName "pvc-fd7473b6-a721-4577-93f7-bea6228887e6". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.870338 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c7f1c00-3243-46d2-96d6-1483681a8906-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "5c7f1c00-3243-46d2-96d6-1483681a8906" (UID: "5c7f1c00-3243-46d2-96d6-1483681a8906"). 
InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.874626 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d9780822-bcb2-4576-a005-16807709c69b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d9780822-bcb2-4576-a005-16807709c69b" (UID: "d9780822-bcb2-4576-a005-16807709c69b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.887033 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5bxks\" (UniqueName: \"kubernetes.io/projected/5c7f1c00-3243-46d2-96d6-1483681a8906-kube-api-access-5bxks\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.887082 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9780822-bcb2-4576-a005-16807709c69b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.887097 4791 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5c7f1c00-3243-46d2-96d6-1483681a8906-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.887111 4791 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c7f1c00-3243-46d2-96d6-1483681a8906-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.887123 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-59th7\" (UniqueName: \"kubernetes.io/projected/d9780822-bcb2-4576-a005-16807709c69b-kube-api-access-59th7\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.887135 4791 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50f29756-1345-4c4c-a9f5-bb817fcf0015-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.887145 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9780822-bcb2-4576-a005-16807709c69b-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.887156 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rztjw\" (UniqueName: \"kubernetes.io/projected/50f29756-1345-4c4c-a9f5-bb817fcf0015-kube-api-access-rztjw\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.887198 4791 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-fd7473b6-a721-4577-93f7-bea6228887e6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fd7473b6-a721-4577-93f7-bea6228887e6\") on node \"crc\" " Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.887214 4791 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5c7f1c00-3243-46d2-96d6-1483681a8906-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.887226 4791 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50f29756-1345-4c4c-a9f5-bb817fcf0015-logs\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 
21:42:57.887237 4791 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/50f29756-1345-4c4c-a9f5-bb817fcf0015-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.895289 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c7f1c00-3243-46d2-96d6-1483681a8906-config-data" (OuterVolumeSpecName: "config-data") pod "5c7f1c00-3243-46d2-96d6-1483681a8906" (UID: "5c7f1c00-3243-46d2-96d6-1483681a8906"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.900610 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50f29756-1345-4c4c-a9f5-bb817fcf0015-config-data" (OuterVolumeSpecName: "config-data") pod "50f29756-1345-4c4c-a9f5-bb817fcf0015" (UID: "50f29756-1345-4c4c-a9f5-bb817fcf0015"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.908770 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c7f1c00-3243-46d2-96d6-1483681a8906-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5c7f1c00-3243-46d2-96d6-1483681a8906" (UID: "5c7f1c00-3243-46d2-96d6-1483681a8906"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.941590 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50f29756-1345-4c4c-a9f5-bb817fcf0015-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "50f29756-1345-4c4c-a9f5-bb817fcf0015" (UID: "50f29756-1345-4c4c-a9f5-bb817fcf0015"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.941746 4791 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.941980 4791 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-fd7473b6-a721-4577-93f7-bea6228887e6" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fd7473b6-a721-4577-93f7-bea6228887e6") on node "crc" Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.989728 4791 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50f29756-1345-4c4c-a9f5-bb817fcf0015-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.990486 4791 reconciler_common.go:293] "Volume detached for volume \"pvc-fd7473b6-a721-4577-93f7-bea6228887e6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fd7473b6-a721-4577-93f7-bea6228887e6\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.990574 4791 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c7f1c00-3243-46d2-96d6-1483681a8906-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.990655 4791 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c7f1c00-3243-46d2-96d6-1483681a8906-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:57 crc kubenswrapper[4791]: I1208 21:42:57.990785 4791 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50f29756-1345-4c4c-a9f5-bb817fcf0015-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.417590 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"50f29756-1345-4c4c-a9f5-bb817fcf0015","Type":"ContainerDied","Data":"04ab95c4d64a883785f2300dfcdf40ccffdb1718f75a2b404eec50f98fd49abd"} Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.417629 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.417639 4791 scope.go:117] "RemoveContainer" containerID="6c4917b79e37fb87f6cb73ac752adc8eb89e4b0e2a75cccd4d48893b01c44fe5" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.422473 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pmmmw" event={"ID":"d9780822-bcb2-4576-a005-16807709c69b","Type":"ContainerDied","Data":"3650278cfe9686d4be1cece26b0f127edeb822892a26466360519eb4d2e5cc0b"} Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.422553 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pmmmw" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.427139 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-5gx5k" event={"ID":"5c7f1c00-3243-46d2-96d6-1483681a8906","Type":"ContainerDied","Data":"10176e523f97f54727988bde31ade6d3184d0cf0b78f51c87c615eb5b75945b3"} Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.427176 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="10176e523f97f54727988bde31ade6d3184d0cf0b78f51c87c615eb5b75945b3" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.427225 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-5gx5k" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.476829 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pmmmw"] Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.490268 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-pmmmw"] Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.503143 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.521528 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.536568 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 08 21:42:58 crc kubenswrapper[4791]: E1208 21:42:58.537411 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50f29756-1345-4c4c-a9f5-bb817fcf0015" containerName="glance-httpd" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.537430 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="50f29756-1345-4c4c-a9f5-bb817fcf0015" containerName="glance-httpd" Dec 08 21:42:58 crc kubenswrapper[4791]: E1208 21:42:58.537441 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9780822-bcb2-4576-a005-16807709c69b" containerName="extract-content" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.537448 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9780822-bcb2-4576-a005-16807709c69b" containerName="extract-content" Dec 08 21:42:58 crc kubenswrapper[4791]: E1208 21:42:58.537464 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eafc3e7a-7fa4-4151-895c-4181afb453a9" containerName="init" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.537470 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="eafc3e7a-7fa4-4151-895c-4181afb453a9" containerName="init" Dec 08 21:42:58 crc kubenswrapper[4791]: E1208 21:42:58.537491 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a0201ee-a799-4831-bf05-0804b6f413b9" containerName="init" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.537497 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a0201ee-a799-4831-bf05-0804b6f413b9" containerName="init" Dec 08 21:42:58 crc kubenswrapper[4791]: E1208 21:42:58.537521 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9780822-bcb2-4576-a005-16807709c69b" containerName="extract-utilities" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.537530 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9780822-bcb2-4576-a005-16807709c69b" containerName="extract-utilities" Dec 08 21:42:58 crc kubenswrapper[4791]: E1208 21:42:58.537550 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9780822-bcb2-4576-a005-16807709c69b" containerName="registry-server" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.537556 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9780822-bcb2-4576-a005-16807709c69b" containerName="registry-server" Dec 08 21:42:58 crc kubenswrapper[4791]: E1208 21:42:58.537585 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c7f1c00-3243-46d2-96d6-1483681a8906" containerName="keystone-bootstrap" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.537591 4791 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="5c7f1c00-3243-46d2-96d6-1483681a8906" containerName="keystone-bootstrap" Dec 08 21:42:58 crc kubenswrapper[4791]: E1208 21:42:58.537609 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50f29756-1345-4c4c-a9f5-bb817fcf0015" containerName="glance-log" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.537615 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="50f29756-1345-4c4c-a9f5-bb817fcf0015" containerName="glance-log" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.537944 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a0201ee-a799-4831-bf05-0804b6f413b9" containerName="init" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.537963 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c7f1c00-3243-46d2-96d6-1483681a8906" containerName="keystone-bootstrap" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.537974 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="50f29756-1345-4c4c-a9f5-bb817fcf0015" containerName="glance-httpd" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.537986 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="50f29756-1345-4c4c-a9f5-bb817fcf0015" containerName="glance-log" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.538007 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9780822-bcb2-4576-a005-16807709c69b" containerName="registry-server" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.538015 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="eafc3e7a-7fa4-4151-895c-4181afb453a9" containerName="init" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.575560 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.576144 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.583438 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.584184 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.705327 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/477be1bd-91b3-46cb-ac8d-8e1bd8242066-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"477be1bd-91b3-46cb-ac8d-8e1bd8242066\") " pod="openstack/glance-default-external-api-0" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.705409 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/477be1bd-91b3-46cb-ac8d-8e1bd8242066-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"477be1bd-91b3-46cb-ac8d-8e1bd8242066\") " pod="openstack/glance-default-external-api-0" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.705439 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/477be1bd-91b3-46cb-ac8d-8e1bd8242066-scripts\") pod \"glance-default-external-api-0\" (UID: \"477be1bd-91b3-46cb-ac8d-8e1bd8242066\") " pod="openstack/glance-default-external-api-0" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.705600 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-fd7473b6-a721-4577-93f7-bea6228887e6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fd7473b6-a721-4577-93f7-bea6228887e6\") pod \"glance-default-external-api-0\" (UID: \"477be1bd-91b3-46cb-ac8d-8e1bd8242066\") " pod="openstack/glance-default-external-api-0" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.705867 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/477be1bd-91b3-46cb-ac8d-8e1bd8242066-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"477be1bd-91b3-46cb-ac8d-8e1bd8242066\") " pod="openstack/glance-default-external-api-0" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.706159 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pkqxm\" (UniqueName: \"kubernetes.io/projected/477be1bd-91b3-46cb-ac8d-8e1bd8242066-kube-api-access-pkqxm\") pod \"glance-default-external-api-0\" (UID: \"477be1bd-91b3-46cb-ac8d-8e1bd8242066\") " pod="openstack/glance-default-external-api-0" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.706298 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/477be1bd-91b3-46cb-ac8d-8e1bd8242066-config-data\") pod \"glance-default-external-api-0\" (UID: \"477be1bd-91b3-46cb-ac8d-8e1bd8242066\") " pod="openstack/glance-default-external-api-0" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.706345 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/477be1bd-91b3-46cb-ac8d-8e1bd8242066-logs\") pod \"glance-default-external-api-0\" (UID: \"477be1bd-91b3-46cb-ac8d-8e1bd8242066\") " pod="openstack/glance-default-external-api-0" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.816347 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/477be1bd-91b3-46cb-ac8d-8e1bd8242066-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"477be1bd-91b3-46cb-ac8d-8e1bd8242066\") " pod="openstack/glance-default-external-api-0" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.816405 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/477be1bd-91b3-46cb-ac8d-8e1bd8242066-scripts\") pod \"glance-default-external-api-0\" (UID: \"477be1bd-91b3-46cb-ac8d-8e1bd8242066\") " pod="openstack/glance-default-external-api-0" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.816453 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-fd7473b6-a721-4577-93f7-bea6228887e6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fd7473b6-a721-4577-93f7-bea6228887e6\") pod \"glance-default-external-api-0\" (UID: \"477be1bd-91b3-46cb-ac8d-8e1bd8242066\") " pod="openstack/glance-default-external-api-0" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.816500 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/477be1bd-91b3-46cb-ac8d-8e1bd8242066-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"477be1bd-91b3-46cb-ac8d-8e1bd8242066\") " pod="openstack/glance-default-external-api-0" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.816547 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pkqxm\" (UniqueName: \"kubernetes.io/projected/477be1bd-91b3-46cb-ac8d-8e1bd8242066-kube-api-access-pkqxm\") pod \"glance-default-external-api-0\" (UID: \"477be1bd-91b3-46cb-ac8d-8e1bd8242066\") " pod="openstack/glance-default-external-api-0" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.816584 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/477be1bd-91b3-46cb-ac8d-8e1bd8242066-config-data\") pod \"glance-default-external-api-0\" (UID: \"477be1bd-91b3-46cb-ac8d-8e1bd8242066\") " pod="openstack/glance-default-external-api-0" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.816611 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/477be1bd-91b3-46cb-ac8d-8e1bd8242066-logs\") pod \"glance-default-external-api-0\" (UID: \"477be1bd-91b3-46cb-ac8d-8e1bd8242066\") " pod="openstack/glance-default-external-api-0" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.816744 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/477be1bd-91b3-46cb-ac8d-8e1bd8242066-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"477be1bd-91b3-46cb-ac8d-8e1bd8242066\") " pod="openstack/glance-default-external-api-0" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.819269 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/477be1bd-91b3-46cb-ac8d-8e1bd8242066-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"477be1bd-91b3-46cb-ac8d-8e1bd8242066\") " pod="openstack/glance-default-external-api-0" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.821482 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/477be1bd-91b3-46cb-ac8d-8e1bd8242066-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"477be1bd-91b3-46cb-ac8d-8e1bd8242066\") " pod="openstack/glance-default-external-api-0" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.827912 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/477be1bd-91b3-46cb-ac8d-8e1bd8242066-scripts\") pod \"glance-default-external-api-0\" (UID: \"477be1bd-91b3-46cb-ac8d-8e1bd8242066\") " pod="openstack/glance-default-external-api-0" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.828470 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/477be1bd-91b3-46cb-ac8d-8e1bd8242066-logs\") pod \"glance-default-external-api-0\" (UID: \"477be1bd-91b3-46cb-ac8d-8e1bd8242066\") " pod="openstack/glance-default-external-api-0" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.828661 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/477be1bd-91b3-46cb-ac8d-8e1bd8242066-config-data\") pod \"glance-default-external-api-0\" (UID: \"477be1bd-91b3-46cb-ac8d-8e1bd8242066\") " pod="openstack/glance-default-external-api-0" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.828932 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/477be1bd-91b3-46cb-ac8d-8e1bd8242066-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"477be1bd-91b3-46cb-ac8d-8e1bd8242066\") " pod="openstack/glance-default-external-api-0" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.829393 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-5gx5k"] Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.834203 4791 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.834242 4791 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-fd7473b6-a721-4577-93f7-bea6228887e6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fd7473b6-a721-4577-93f7-bea6228887e6\") pod \"glance-default-external-api-0\" (UID: \"477be1bd-91b3-46cb-ac8d-8e1bd8242066\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/2b1f4bc6b3ef864632668d1177fe0017bb99c34b82e704a930801276839aa0f1/globalmount\"" pod="openstack/glance-default-external-api-0" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.841534 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-5gx5k"] Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.853335 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pkqxm\" (UniqueName: \"kubernetes.io/projected/477be1bd-91b3-46cb-ac8d-8e1bd8242066-kube-api-access-pkqxm\") pod \"glance-default-external-api-0\" (UID: \"477be1bd-91b3-46cb-ac8d-8e1bd8242066\") " pod="openstack/glance-default-external-api-0" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.943159 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-fd7473b6-a721-4577-93f7-bea6228887e6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fd7473b6-a721-4577-93f7-bea6228887e6\") pod \"glance-default-external-api-0\" (UID: \"477be1bd-91b3-46cb-ac8d-8e1bd8242066\") " pod="openstack/glance-default-external-api-0" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.944181 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-bt6fn"] Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.947085 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-bt6fn" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.949988 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.950350 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.950588 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.951067 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-kwdwq" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.954382 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 08 21:42:58 crc kubenswrapper[4791]: I1208 21:42:58.983216 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-bt6fn"] Dec 08 21:42:59 crc kubenswrapper[4791]: I1208 21:42:59.021925 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a6d980c9-5283-4755-9cb5-5d86ed36edcf-credential-keys\") pod \"keystone-bootstrap-bt6fn\" (UID: \"a6d980c9-5283-4755-9cb5-5d86ed36edcf\") " pod="openstack/keystone-bootstrap-bt6fn" Dec 08 21:42:59 crc kubenswrapper[4791]: I1208 21:42:59.022275 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6d980c9-5283-4755-9cb5-5d86ed36edcf-config-data\") pod \"keystone-bootstrap-bt6fn\" (UID: \"a6d980c9-5283-4755-9cb5-5d86ed36edcf\") " pod="openstack/keystone-bootstrap-bt6fn" Dec 08 21:42:59 crc kubenswrapper[4791]: I1208 21:42:59.022438 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6d980c9-5283-4755-9cb5-5d86ed36edcf-combined-ca-bundle\") pod \"keystone-bootstrap-bt6fn\" (UID: \"a6d980c9-5283-4755-9cb5-5d86ed36edcf\") " pod="openstack/keystone-bootstrap-bt6fn" Dec 08 21:42:59 crc kubenswrapper[4791]: I1208 21:42:59.022590 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a6d980c9-5283-4755-9cb5-5d86ed36edcf-scripts\") pod \"keystone-bootstrap-bt6fn\" (UID: \"a6d980c9-5283-4755-9cb5-5d86ed36edcf\") " pod="openstack/keystone-bootstrap-bt6fn" Dec 08 21:42:59 crc kubenswrapper[4791]: I1208 21:42:59.022670 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a6d980c9-5283-4755-9cb5-5d86ed36edcf-fernet-keys\") pod \"keystone-bootstrap-bt6fn\" (UID: \"a6d980c9-5283-4755-9cb5-5d86ed36edcf\") " pod="openstack/keystone-bootstrap-bt6fn" Dec 08 21:42:59 crc kubenswrapper[4791]: I1208 21:42:59.022769 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d769z\" (UniqueName: \"kubernetes.io/projected/a6d980c9-5283-4755-9cb5-5d86ed36edcf-kube-api-access-d769z\") pod \"keystone-bootstrap-bt6fn\" (UID: \"a6d980c9-5283-4755-9cb5-5d86ed36edcf\") " pod="openstack/keystone-bootstrap-bt6fn" Dec 08 21:42:59 crc kubenswrapper[4791]: I1208 21:42:59.125159 4791 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"kube-api-access-d769z\" (UniqueName: \"kubernetes.io/projected/a6d980c9-5283-4755-9cb5-5d86ed36edcf-kube-api-access-d769z\") pod \"keystone-bootstrap-bt6fn\" (UID: \"a6d980c9-5283-4755-9cb5-5d86ed36edcf\") " pod="openstack/keystone-bootstrap-bt6fn" Dec 08 21:42:59 crc kubenswrapper[4791]: I1208 21:42:59.125868 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a6d980c9-5283-4755-9cb5-5d86ed36edcf-credential-keys\") pod \"keystone-bootstrap-bt6fn\" (UID: \"a6d980c9-5283-4755-9cb5-5d86ed36edcf\") " pod="openstack/keystone-bootstrap-bt6fn" Dec 08 21:42:59 crc kubenswrapper[4791]: I1208 21:42:59.125905 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6d980c9-5283-4755-9cb5-5d86ed36edcf-config-data\") pod \"keystone-bootstrap-bt6fn\" (UID: \"a6d980c9-5283-4755-9cb5-5d86ed36edcf\") " pod="openstack/keystone-bootstrap-bt6fn" Dec 08 21:42:59 crc kubenswrapper[4791]: I1208 21:42:59.125951 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6d980c9-5283-4755-9cb5-5d86ed36edcf-combined-ca-bundle\") pod \"keystone-bootstrap-bt6fn\" (UID: \"a6d980c9-5283-4755-9cb5-5d86ed36edcf\") " pod="openstack/keystone-bootstrap-bt6fn" Dec 08 21:42:59 crc kubenswrapper[4791]: I1208 21:42:59.125979 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a6d980c9-5283-4755-9cb5-5d86ed36edcf-scripts\") pod \"keystone-bootstrap-bt6fn\" (UID: \"a6d980c9-5283-4755-9cb5-5d86ed36edcf\") " pod="openstack/keystone-bootstrap-bt6fn" Dec 08 21:42:59 crc kubenswrapper[4791]: I1208 21:42:59.126023 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a6d980c9-5283-4755-9cb5-5d86ed36edcf-fernet-keys\") pod \"keystone-bootstrap-bt6fn\" (UID: \"a6d980c9-5283-4755-9cb5-5d86ed36edcf\") " pod="openstack/keystone-bootstrap-bt6fn" Dec 08 21:42:59 crc kubenswrapper[4791]: I1208 21:42:59.131056 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6d980c9-5283-4755-9cb5-5d86ed36edcf-config-data\") pod \"keystone-bootstrap-bt6fn\" (UID: \"a6d980c9-5283-4755-9cb5-5d86ed36edcf\") " pod="openstack/keystone-bootstrap-bt6fn" Dec 08 21:42:59 crc kubenswrapper[4791]: I1208 21:42:59.131368 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a6d980c9-5283-4755-9cb5-5d86ed36edcf-scripts\") pod \"keystone-bootstrap-bt6fn\" (UID: \"a6d980c9-5283-4755-9cb5-5d86ed36edcf\") " pod="openstack/keystone-bootstrap-bt6fn" Dec 08 21:42:59 crc kubenswrapper[4791]: I1208 21:42:59.132280 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6d980c9-5283-4755-9cb5-5d86ed36edcf-combined-ca-bundle\") pod \"keystone-bootstrap-bt6fn\" (UID: \"a6d980c9-5283-4755-9cb5-5d86ed36edcf\") " pod="openstack/keystone-bootstrap-bt6fn" Dec 08 21:42:59 crc kubenswrapper[4791]: I1208 21:42:59.132480 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a6d980c9-5283-4755-9cb5-5d86ed36edcf-credential-keys\") pod \"keystone-bootstrap-bt6fn\" (UID: 
\"a6d980c9-5283-4755-9cb5-5d86ed36edcf\") " pod="openstack/keystone-bootstrap-bt6fn" Dec 08 21:42:59 crc kubenswrapper[4791]: I1208 21:42:59.138227 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a6d980c9-5283-4755-9cb5-5d86ed36edcf-fernet-keys\") pod \"keystone-bootstrap-bt6fn\" (UID: \"a6d980c9-5283-4755-9cb5-5d86ed36edcf\") " pod="openstack/keystone-bootstrap-bt6fn" Dec 08 21:42:59 crc kubenswrapper[4791]: I1208 21:42:59.152779 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d769z\" (UniqueName: \"kubernetes.io/projected/a6d980c9-5283-4755-9cb5-5d86ed36edcf-kube-api-access-d769z\") pod \"keystone-bootstrap-bt6fn\" (UID: \"a6d980c9-5283-4755-9cb5-5d86ed36edcf\") " pod="openstack/keystone-bootstrap-bt6fn" Dec 08 21:42:59 crc kubenswrapper[4791]: I1208 21:42:59.211653 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 08 21:42:59 crc kubenswrapper[4791]: I1208 21:42:59.275937 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-bt6fn" Dec 08 21:42:59 crc kubenswrapper[4791]: I1208 21:42:59.614619 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="50f29756-1345-4c4c-a9f5-bb817fcf0015" path="/var/lib/kubelet/pods/50f29756-1345-4c4c-a9f5-bb817fcf0015/volumes" Dec 08 21:42:59 crc kubenswrapper[4791]: I1208 21:42:59.616115 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c7f1c00-3243-46d2-96d6-1483681a8906" path="/var/lib/kubelet/pods/5c7f1c00-3243-46d2-96d6-1483681a8906/volumes" Dec 08 21:42:59 crc kubenswrapper[4791]: I1208 21:42:59.616869 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9780822-bcb2-4576-a005-16807709c69b" path="/var/lib/kubelet/pods/d9780822-bcb2-4576-a005-16807709c69b/volumes" Dec 08 21:43:05 crc kubenswrapper[4791]: I1208 21:43:05.790145 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5c79d794d7-m7h8z" podUID="aaf43b8d-8ee8-4def-b8c3-8f5596513b2f" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.154:5353: i/o timeout" Dec 08 21:43:05 crc kubenswrapper[4791]: I1208 21:43:05.791024 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c79d794d7-m7h8z" Dec 08 21:43:07 crc kubenswrapper[4791]: I1208 21:43:07.170165 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 08 21:43:07 crc kubenswrapper[4791]: I1208 21:43:07.328620 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3530eacc-1908-4492-a59f-15d59644c0dc\") pod \"291eb1df-ac29-4217-9e9b-2a46b6fabfe2\" (UID: \"291eb1df-ac29-4217-9e9b-2a46b6fabfe2\") " Dec 08 21:43:07 crc kubenswrapper[4791]: I1208 21:43:07.328697 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/291eb1df-ac29-4217-9e9b-2a46b6fabfe2-config-data\") pod \"291eb1df-ac29-4217-9e9b-2a46b6fabfe2\" (UID: \"291eb1df-ac29-4217-9e9b-2a46b6fabfe2\") " Dec 08 21:43:07 crc kubenswrapper[4791]: I1208 21:43:07.328859 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/291eb1df-ac29-4217-9e9b-2a46b6fabfe2-logs\") pod \"291eb1df-ac29-4217-9e9b-2a46b6fabfe2\" (UID: \"291eb1df-ac29-4217-9e9b-2a46b6fabfe2\") " Dec 08 21:43:07 crc kubenswrapper[4791]: I1208 21:43:07.328897 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/291eb1df-ac29-4217-9e9b-2a46b6fabfe2-httpd-run\") pod \"291eb1df-ac29-4217-9e9b-2a46b6fabfe2\" (UID: \"291eb1df-ac29-4217-9e9b-2a46b6fabfe2\") " Dec 08 21:43:07 crc kubenswrapper[4791]: I1208 21:43:07.328923 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/291eb1df-ac29-4217-9e9b-2a46b6fabfe2-scripts\") pod \"291eb1df-ac29-4217-9e9b-2a46b6fabfe2\" (UID: \"291eb1df-ac29-4217-9e9b-2a46b6fabfe2\") " Dec 08 21:43:07 crc kubenswrapper[4791]: I1208 21:43:07.328995 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cdxqd\" (UniqueName: \"kubernetes.io/projected/291eb1df-ac29-4217-9e9b-2a46b6fabfe2-kube-api-access-cdxqd\") pod \"291eb1df-ac29-4217-9e9b-2a46b6fabfe2\" (UID: \"291eb1df-ac29-4217-9e9b-2a46b6fabfe2\") " Dec 08 21:43:07 crc kubenswrapper[4791]: I1208 21:43:07.329079 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/291eb1df-ac29-4217-9e9b-2a46b6fabfe2-combined-ca-bundle\") pod \"291eb1df-ac29-4217-9e9b-2a46b6fabfe2\" (UID: \"291eb1df-ac29-4217-9e9b-2a46b6fabfe2\") " Dec 08 21:43:07 crc kubenswrapper[4791]: I1208 21:43:07.330581 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/291eb1df-ac29-4217-9e9b-2a46b6fabfe2-logs" (OuterVolumeSpecName: "logs") pod "291eb1df-ac29-4217-9e9b-2a46b6fabfe2" (UID: "291eb1df-ac29-4217-9e9b-2a46b6fabfe2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:43:07 crc kubenswrapper[4791]: I1208 21:43:07.330685 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/291eb1df-ac29-4217-9e9b-2a46b6fabfe2-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "291eb1df-ac29-4217-9e9b-2a46b6fabfe2" (UID: "291eb1df-ac29-4217-9e9b-2a46b6fabfe2"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:43:07 crc kubenswrapper[4791]: I1208 21:43:07.335103 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/291eb1df-ac29-4217-9e9b-2a46b6fabfe2-kube-api-access-cdxqd" (OuterVolumeSpecName: "kube-api-access-cdxqd") pod "291eb1df-ac29-4217-9e9b-2a46b6fabfe2" (UID: "291eb1df-ac29-4217-9e9b-2a46b6fabfe2"). InnerVolumeSpecName "kube-api-access-cdxqd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:43:07 crc kubenswrapper[4791]: I1208 21:43:07.341460 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/291eb1df-ac29-4217-9e9b-2a46b6fabfe2-scripts" (OuterVolumeSpecName: "scripts") pod "291eb1df-ac29-4217-9e9b-2a46b6fabfe2" (UID: "291eb1df-ac29-4217-9e9b-2a46b6fabfe2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:07 crc kubenswrapper[4791]: E1208 21:43:07.371752 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3530eacc-1908-4492-a59f-15d59644c0dc podName:291eb1df-ac29-4217-9e9b-2a46b6fabfe2 nodeName:}" failed. No retries permitted until 2025-12-08 21:43:07.871689251 +0000 UTC m=+1464.570447596 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "glance" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3530eacc-1908-4492-a59f-15d59644c0dc") pod "291eb1df-ac29-4217-9e9b-2a46b6fabfe2" (UID: "291eb1df-ac29-4217-9e9b-2a46b6fabfe2") : kubernetes.io/csi: Unmounter.TearDownAt failed: rpc error: code = Unknown desc = check target path: could not get consistent content of /proc/mounts after 3 attempts Dec 08 21:43:07 crc kubenswrapper[4791]: I1208 21:43:07.373673 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/291eb1df-ac29-4217-9e9b-2a46b6fabfe2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "291eb1df-ac29-4217-9e9b-2a46b6fabfe2" (UID: "291eb1df-ac29-4217-9e9b-2a46b6fabfe2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:07 crc kubenswrapper[4791]: I1208 21:43:07.386985 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/291eb1df-ac29-4217-9e9b-2a46b6fabfe2-config-data" (OuterVolumeSpecName: "config-data") pod "291eb1df-ac29-4217-9e9b-2a46b6fabfe2" (UID: "291eb1df-ac29-4217-9e9b-2a46b6fabfe2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:07 crc kubenswrapper[4791]: I1208 21:43:07.432566 4791 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/291eb1df-ac29-4217-9e9b-2a46b6fabfe2-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:07 crc kubenswrapper[4791]: I1208 21:43:07.432905 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cdxqd\" (UniqueName: \"kubernetes.io/projected/291eb1df-ac29-4217-9e9b-2a46b6fabfe2-kube-api-access-cdxqd\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:07 crc kubenswrapper[4791]: I1208 21:43:07.433013 4791 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/291eb1df-ac29-4217-9e9b-2a46b6fabfe2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:07 crc kubenswrapper[4791]: I1208 21:43:07.433135 4791 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/291eb1df-ac29-4217-9e9b-2a46b6fabfe2-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:07 crc kubenswrapper[4791]: I1208 21:43:07.433277 4791 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/291eb1df-ac29-4217-9e9b-2a46b6fabfe2-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:07 crc kubenswrapper[4791]: I1208 21:43:07.433412 4791 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/291eb1df-ac29-4217-9e9b-2a46b6fabfe2-logs\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:07 crc kubenswrapper[4791]: I1208 21:43:07.523900 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"291eb1df-ac29-4217-9e9b-2a46b6fabfe2","Type":"ContainerDied","Data":"216e830f770e5e68431075d9dceedf30b12b825051cc9769ca33ff596eaf8829"} Dec 08 21:43:07 crc kubenswrapper[4791]: I1208 21:43:07.523977 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 08 21:43:07 crc kubenswrapper[4791]: E1208 21:43:07.656391 4791 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified" Dec 08 21:43:07 crc kubenswrapper[4791]: E1208 21:43:07.656556 4791 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,Command:[/bin/bash],Args:[-c barbican-manage db upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-z7slx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-7srv6_openstack(d9f87323-3041-444f-b26d-c76871bd426f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 08 21:43:07 crc kubenswrapper[4791]: E1208 21:43:07.657665 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-7srv6" podUID="d9f87323-3041-444f-b26d-c76871bd426f" Dec 08 21:43:07 crc kubenswrapper[4791]: I1208 21:43:07.712455 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c79d794d7-m7h8z" Dec 08 21:43:07 crc kubenswrapper[4791]: I1208 21:43:07.842846 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fzpqf\" (UniqueName: \"kubernetes.io/projected/aaf43b8d-8ee8-4def-b8c3-8f5596513b2f-kube-api-access-fzpqf\") pod \"aaf43b8d-8ee8-4def-b8c3-8f5596513b2f\" (UID: \"aaf43b8d-8ee8-4def-b8c3-8f5596513b2f\") " Dec 08 21:43:07 crc kubenswrapper[4791]: I1208 21:43:07.843516 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aaf43b8d-8ee8-4def-b8c3-8f5596513b2f-config\") pod \"aaf43b8d-8ee8-4def-b8c3-8f5596513b2f\" (UID: \"aaf43b8d-8ee8-4def-b8c3-8f5596513b2f\") " Dec 08 21:43:07 crc kubenswrapper[4791]: I1208 21:43:07.843808 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/aaf43b8d-8ee8-4def-b8c3-8f5596513b2f-ovsdbserver-nb\") pod \"aaf43b8d-8ee8-4def-b8c3-8f5596513b2f\" (UID: \"aaf43b8d-8ee8-4def-b8c3-8f5596513b2f\") " Dec 08 21:43:07 crc kubenswrapper[4791]: I1208 21:43:07.843951 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/aaf43b8d-8ee8-4def-b8c3-8f5596513b2f-dns-svc\") pod \"aaf43b8d-8ee8-4def-b8c3-8f5596513b2f\" (UID: \"aaf43b8d-8ee8-4def-b8c3-8f5596513b2f\") " Dec 08 21:43:07 crc kubenswrapper[4791]: I1208 21:43:07.844033 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/aaf43b8d-8ee8-4def-b8c3-8f5596513b2f-dns-swift-storage-0\") pod \"aaf43b8d-8ee8-4def-b8c3-8f5596513b2f\" (UID: \"aaf43b8d-8ee8-4def-b8c3-8f5596513b2f\") " Dec 08 21:43:07 crc kubenswrapper[4791]: I1208 21:43:07.844084 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/aaf43b8d-8ee8-4def-b8c3-8f5596513b2f-ovsdbserver-sb\") pod \"aaf43b8d-8ee8-4def-b8c3-8f5596513b2f\" (UID: \"aaf43b8d-8ee8-4def-b8c3-8f5596513b2f\") " Dec 08 21:43:07 crc kubenswrapper[4791]: I1208 21:43:07.846624 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aaf43b8d-8ee8-4def-b8c3-8f5596513b2f-kube-api-access-fzpqf" (OuterVolumeSpecName: "kube-api-access-fzpqf") pod "aaf43b8d-8ee8-4def-b8c3-8f5596513b2f" (UID: "aaf43b8d-8ee8-4def-b8c3-8f5596513b2f"). InnerVolumeSpecName "kube-api-access-fzpqf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:43:07 crc kubenswrapper[4791]: I1208 21:43:07.891975 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aaf43b8d-8ee8-4def-b8c3-8f5596513b2f-config" (OuterVolumeSpecName: "config") pod "aaf43b8d-8ee8-4def-b8c3-8f5596513b2f" (UID: "aaf43b8d-8ee8-4def-b8c3-8f5596513b2f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:43:07 crc kubenswrapper[4791]: I1208 21:43:07.895667 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aaf43b8d-8ee8-4def-b8c3-8f5596513b2f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "aaf43b8d-8ee8-4def-b8c3-8f5596513b2f" (UID: "aaf43b8d-8ee8-4def-b8c3-8f5596513b2f"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:43:07 crc kubenswrapper[4791]: I1208 21:43:07.900064 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aaf43b8d-8ee8-4def-b8c3-8f5596513b2f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "aaf43b8d-8ee8-4def-b8c3-8f5596513b2f" (UID: "aaf43b8d-8ee8-4def-b8c3-8f5596513b2f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:43:07 crc kubenswrapper[4791]: I1208 21:43:07.902053 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aaf43b8d-8ee8-4def-b8c3-8f5596513b2f-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "aaf43b8d-8ee8-4def-b8c3-8f5596513b2f" (UID: "aaf43b8d-8ee8-4def-b8c3-8f5596513b2f"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:43:07 crc kubenswrapper[4791]: I1208 21:43:07.905439 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aaf43b8d-8ee8-4def-b8c3-8f5596513b2f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "aaf43b8d-8ee8-4def-b8c3-8f5596513b2f" (UID: "aaf43b8d-8ee8-4def-b8c3-8f5596513b2f"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:43:07 crc kubenswrapper[4791]: I1208 21:43:07.945975 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3530eacc-1908-4492-a59f-15d59644c0dc\") pod \"291eb1df-ac29-4217-9e9b-2a46b6fabfe2\" (UID: \"291eb1df-ac29-4217-9e9b-2a46b6fabfe2\") " Dec 08 21:43:07 crc kubenswrapper[4791]: I1208 21:43:07.946792 4791 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/aaf43b8d-8ee8-4def-b8c3-8f5596513b2f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:07 crc kubenswrapper[4791]: I1208 21:43:07.946808 4791 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/aaf43b8d-8ee8-4def-b8c3-8f5596513b2f-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:07 crc kubenswrapper[4791]: I1208 21:43:07.946816 4791 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/aaf43b8d-8ee8-4def-b8c3-8f5596513b2f-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:07 crc kubenswrapper[4791]: I1208 21:43:07.946824 4791 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/aaf43b8d-8ee8-4def-b8c3-8f5596513b2f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:07 crc kubenswrapper[4791]: I1208 21:43:07.946833 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fzpqf\" (UniqueName: \"kubernetes.io/projected/aaf43b8d-8ee8-4def-b8c3-8f5596513b2f-kube-api-access-fzpqf\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:07 crc kubenswrapper[4791]: I1208 21:43:07.946843 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aaf43b8d-8ee8-4def-b8c3-8f5596513b2f-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:07 crc kubenswrapper[4791]: I1208 21:43:07.959125 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3530eacc-1908-4492-a59f-15d59644c0dc" (OuterVolumeSpecName: "glance") pod "291eb1df-ac29-4217-9e9b-2a46b6fabfe2" (UID: "291eb1df-ac29-4217-9e9b-2a46b6fabfe2"). InnerVolumeSpecName "pvc-3530eacc-1908-4492-a59f-15d59644c0dc". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.048464 4791 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-3530eacc-1908-4492-a59f-15d59644c0dc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3530eacc-1908-4492-a59f-15d59644c0dc\") on node \"crc\" " Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.079480 4791 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.079661 4791 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-3530eacc-1908-4492-a59f-15d59644c0dc" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3530eacc-1908-4492-a59f-15d59644c0dc") on node "crc" Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.132353 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.143502 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.153041 4791 reconciler_common.go:293] "Volume detached for volume \"pvc-3530eacc-1908-4492-a59f-15d59644c0dc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3530eacc-1908-4492-a59f-15d59644c0dc\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.170192 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 08 21:43:08 crc kubenswrapper[4791]: E1208 21:43:08.170927 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="291eb1df-ac29-4217-9e9b-2a46b6fabfe2" containerName="glance-log" Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.171906 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="291eb1df-ac29-4217-9e9b-2a46b6fabfe2" containerName="glance-log" Dec 08 21:43:08 crc kubenswrapper[4791]: E1208 21:43:08.171988 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aaf43b8d-8ee8-4def-b8c3-8f5596513b2f" containerName="dnsmasq-dns" Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.172046 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="aaf43b8d-8ee8-4def-b8c3-8f5596513b2f" containerName="dnsmasq-dns" Dec 08 21:43:08 crc kubenswrapper[4791]: E1208 21:43:08.172144 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="291eb1df-ac29-4217-9e9b-2a46b6fabfe2" containerName="glance-httpd" Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.172220 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="291eb1df-ac29-4217-9e9b-2a46b6fabfe2" containerName="glance-httpd" Dec 08 21:43:08 crc kubenswrapper[4791]: E1208 21:43:08.172311 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aaf43b8d-8ee8-4def-b8c3-8f5596513b2f" containerName="init" Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.172374 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="aaf43b8d-8ee8-4def-b8c3-8f5596513b2f" containerName="init" Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.172664 4791 
memory_manager.go:354] "RemoveStaleState removing state" podUID="aaf43b8d-8ee8-4def-b8c3-8f5596513b2f" containerName="dnsmasq-dns" Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.172760 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="291eb1df-ac29-4217-9e9b-2a46b6fabfe2" containerName="glance-log" Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.172850 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="291eb1df-ac29-4217-9e9b-2a46b6fabfe2" containerName="glance-httpd" Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.174113 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.189978 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.190637 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.206170 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.361442 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36cb82aa-4e45-4bb1-9192-de3caeb2b9a5-config-data\") pod \"glance-default-internal-api-0\" (UID: \"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.361511 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/36cb82aa-4e45-4bb1-9192-de3caeb2b9a5-logs\") pod \"glance-default-internal-api-0\" (UID: \"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.361560 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36cb82aa-4e45-4bb1-9192-de3caeb2b9a5-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.361606 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36cb82aa-4e45-4bb1-9192-de3caeb2b9a5-scripts\") pod \"glance-default-internal-api-0\" (UID: \"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.361633 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/36cb82aa-4e45-4bb1-9192-de3caeb2b9a5-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.361657 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/36cb82aa-4e45-4bb1-9192-de3caeb2b9a5-httpd-run\") pod \"glance-default-internal-api-0\" 
(UID: \"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.361677 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zf4nt\" (UniqueName: \"kubernetes.io/projected/36cb82aa-4e45-4bb1-9192-de3caeb2b9a5-kube-api-access-zf4nt\") pod \"glance-default-internal-api-0\" (UID: \"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.361784 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-3530eacc-1908-4492-a59f-15d59644c0dc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3530eacc-1908-4492-a59f-15d59644c0dc\") pod \"glance-default-internal-api-0\" (UID: \"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.463946 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-3530eacc-1908-4492-a59f-15d59644c0dc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3530eacc-1908-4492-a59f-15d59644c0dc\") pod \"glance-default-internal-api-0\" (UID: \"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.464033 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36cb82aa-4e45-4bb1-9192-de3caeb2b9a5-config-data\") pod \"glance-default-internal-api-0\" (UID: \"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.464111 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/36cb82aa-4e45-4bb1-9192-de3caeb2b9a5-logs\") pod \"glance-default-internal-api-0\" (UID: \"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.464169 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36cb82aa-4e45-4bb1-9192-de3caeb2b9a5-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.464220 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36cb82aa-4e45-4bb1-9192-de3caeb2b9a5-scripts\") pod \"glance-default-internal-api-0\" (UID: \"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.464253 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/36cb82aa-4e45-4bb1-9192-de3caeb2b9a5-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.464288 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/36cb82aa-4e45-4bb1-9192-de3caeb2b9a5-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.464312 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zf4nt\" (UniqueName: \"kubernetes.io/projected/36cb82aa-4e45-4bb1-9192-de3caeb2b9a5-kube-api-access-zf4nt\") pod \"glance-default-internal-api-0\" (UID: \"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.466013 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/36cb82aa-4e45-4bb1-9192-de3caeb2b9a5-logs\") pod \"glance-default-internal-api-0\" (UID: \"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.467653 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/36cb82aa-4e45-4bb1-9192-de3caeb2b9a5-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.468416 4791 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.468500 4791 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-3530eacc-1908-4492-a59f-15d59644c0dc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3530eacc-1908-4492-a59f-15d59644c0dc\") pod \"glance-default-internal-api-0\" (UID: \"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/b6e75dc9bbe8f5543873ba1da9c9ef2677b6e25aaa3d050091b399bf8011b4a0/globalmount\"" pod="openstack/glance-default-internal-api-0" Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.469413 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36cb82aa-4e45-4bb1-9192-de3caeb2b9a5-config-data\") pod \"glance-default-internal-api-0\" (UID: \"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.470353 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36cb82aa-4e45-4bb1-9192-de3caeb2b9a5-scripts\") pod \"glance-default-internal-api-0\" (UID: \"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.477149 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36cb82aa-4e45-4bb1-9192-de3caeb2b9a5-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.478111 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/36cb82aa-4e45-4bb1-9192-de3caeb2b9a5-internal-tls-certs\") pod 
\"glance-default-internal-api-0\" (UID: \"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.479619 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zf4nt\" (UniqueName: \"kubernetes.io/projected/36cb82aa-4e45-4bb1-9192-de3caeb2b9a5-kube-api-access-zf4nt\") pod \"glance-default-internal-api-0\" (UID: \"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.515349 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-3530eacc-1908-4492-a59f-15d59644c0dc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3530eacc-1908-4492-a59f-15d59644c0dc\") pod \"glance-default-internal-api-0\" (UID: \"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.542256 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c79d794d7-m7h8z" event={"ID":"aaf43b8d-8ee8-4def-b8c3-8f5596513b2f","Type":"ContainerDied","Data":"740df49a4847c9f376743eedfa02ad8ab302e14747cfff534fc727af1d6f9d85"} Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.542284 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c79d794d7-m7h8z" Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.543937 4791 generic.go:334] "Generic (PLEG): container finished" podID="bd2c18de-3f41-4c59-b400-d96f39d28ec2" containerID="fb861b6e2cc7f2c3e139c059bc20dfd39ee6ec7bfb8586207b3f23753a0d3e4e" exitCode=0 Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.544002 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-779pn" event={"ID":"bd2c18de-3f41-4c59-b400-d96f39d28ec2","Type":"ContainerDied","Data":"fb861b6e2cc7f2c3e139c059bc20dfd39ee6ec7bfb8586207b3f23753a0d3e4e"} Dec 08 21:43:08 crc kubenswrapper[4791]: E1208 21:43:08.561950 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified\\\"\"" pod="openstack/barbican-db-sync-7srv6" podUID="d9f87323-3041-444f-b26d-c76871bd426f" Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.609835 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c79d794d7-m7h8z"] Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.620363 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c79d794d7-m7h8z"] Dec 08 21:43:08 crc kubenswrapper[4791]: I1208 21:43:08.818644 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 08 21:43:09 crc kubenswrapper[4791]: I1208 21:43:09.008116 4791 scope.go:117] "RemoveContainer" containerID="5b51599cc33a3b8ffc146567886d28e282429698416af83a86f193436bb8c9fc" Dec 08 21:43:09 crc kubenswrapper[4791]: E1208 21:43:09.008111 4791 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Dec 08 21:43:09 crc kubenswrapper[4791]: E1208 21:43:09.008442 4791 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xbpxg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-5vvss_openstack(ee824c0f-2eaa-4eee-8dcf-f487d9445012): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 08 21:43:09 crc kubenswrapper[4791]: E1208 21:43:09.009649 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-5vvss" 
podUID="ee824c0f-2eaa-4eee-8dcf-f487d9445012" Dec 08 21:43:09 crc kubenswrapper[4791]: I1208 21:43:09.320891 4791 scope.go:117] "RemoveContainer" containerID="f555e697b7b0dd29924ddffde7d840c1307d111f66d2ae27b1b1cbef89c75870" Dec 08 21:43:09 crc kubenswrapper[4791]: I1208 21:43:09.589483 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-hdkvl" event={"ID":"242da563-f632-4ba3-be9e-bd7d0376120d","Type":"ContainerStarted","Data":"efbe1692614514a129d83ff55d87a7174a9446605c0dfa7b06ae2a46f9333723"} Dec 08 21:43:09 crc kubenswrapper[4791]: I1208 21:43:09.592049 4791 scope.go:117] "RemoveContainer" containerID="e498cf25bdf3b538c6937e955c5ea536267edfdc1b66f60e7ae12a257b6dabb8" Dec 08 21:43:09 crc kubenswrapper[4791]: E1208 21:43:09.593647 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-5vvss" podUID="ee824c0f-2eaa-4eee-8dcf-f487d9445012" Dec 08 21:43:09 crc kubenswrapper[4791]: I1208 21:43:09.621557 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="291eb1df-ac29-4217-9e9b-2a46b6fabfe2" path="/var/lib/kubelet/pods/291eb1df-ac29-4217-9e9b-2a46b6fabfe2/volumes" Dec 08 21:43:09 crc kubenswrapper[4791]: I1208 21:43:09.622838 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aaf43b8d-8ee8-4def-b8c3-8f5596513b2f" path="/var/lib/kubelet/pods/aaf43b8d-8ee8-4def-b8c3-8f5596513b2f/volumes" Dec 08 21:43:09 crc kubenswrapper[4791]: I1208 21:43:09.650924 4791 scope.go:117] "RemoveContainer" containerID="0e3777f9fbc6f74d7b48caed52a0a0dc3d1640ecf14d3ed5537ca8d19afd55be" Dec 08 21:43:09 crc kubenswrapper[4791]: I1208 21:43:09.659279 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-hdkvl" podStartSLOduration=4.356291993 podStartE2EDuration="33.65926011s" podCreationTimestamp="2025-12-08 21:42:36 +0000 UTC" firstStartedPulling="2025-12-08 21:42:38.336400975 +0000 UTC m=+1435.035159310" lastFinishedPulling="2025-12-08 21:43:07.639369082 +0000 UTC m=+1464.338127427" observedRunningTime="2025-12-08 21:43:09.649234564 +0000 UTC m=+1466.347992909" watchObservedRunningTime="2025-12-08 21:43:09.65926011 +0000 UTC m=+1466.358018455" Dec 08 21:43:09 crc kubenswrapper[4791]: I1208 21:43:09.694026 4791 scope.go:117] "RemoveContainer" containerID="f355ff51af3e60569527fccdacbea63b4b9b26033b20878391bfac6ddacf84ad" Dec 08 21:43:09 crc kubenswrapper[4791]: I1208 21:43:09.751857 4791 scope.go:117] "RemoveContainer" containerID="30f52b9f0202b8dc81727d5274491ad9f7c4275000853d29e7f2b3758fd29332" Dec 08 21:43:09 crc kubenswrapper[4791]: I1208 21:43:09.771132 4791 scope.go:117] "RemoveContainer" containerID="6ae75eb34657f585e1939faacaa48865a1916b8e299a99eefbff873707b6af10" Dec 08 21:43:09 crc kubenswrapper[4791]: I1208 21:43:09.801243 4791 scope.go:117] "RemoveContainer" containerID="6d67514fb4b6616bb01201c4f3c0d886e390f8ca5ba9089400cb30e06685b22a" Dec 08 21:43:09 crc kubenswrapper[4791]: I1208 21:43:09.837936 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-bt6fn"] Dec 08 21:43:09 crc kubenswrapper[4791]: W1208 21:43:09.866683 4791 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda6d980c9_5283_4755_9cb5_5d86ed36edcf.slice/crio-1224ba43227a9c4701e260c3053007da9717731a80404804d7a23754d1121685 WatchSource:0}: Error finding container 1224ba43227a9c4701e260c3053007da9717731a80404804d7a23754d1121685: Status 404 returned error can't find the container with id 1224ba43227a9c4701e260c3053007da9717731a80404804d7a23754d1121685 Dec 08 21:43:09 crc kubenswrapper[4791]: I1208 21:43:09.933174 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 08 21:43:10 crc kubenswrapper[4791]: I1208 21:43:10.095021 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 08 21:43:10 crc kubenswrapper[4791]: I1208 21:43:10.271468 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-779pn" Dec 08 21:43:10 crc kubenswrapper[4791]: I1208 21:43:10.436846 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/bd2c18de-3f41-4c59-b400-d96f39d28ec2-config\") pod \"bd2c18de-3f41-4c59-b400-d96f39d28ec2\" (UID: \"bd2c18de-3f41-4c59-b400-d96f39d28ec2\") " Dec 08 21:43:10 crc kubenswrapper[4791]: I1208 21:43:10.436907 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd2c18de-3f41-4c59-b400-d96f39d28ec2-combined-ca-bundle\") pod \"bd2c18de-3f41-4c59-b400-d96f39d28ec2\" (UID: \"bd2c18de-3f41-4c59-b400-d96f39d28ec2\") " Dec 08 21:43:10 crc kubenswrapper[4791]: I1208 21:43:10.436943 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5gkbg\" (UniqueName: \"kubernetes.io/projected/bd2c18de-3f41-4c59-b400-d96f39d28ec2-kube-api-access-5gkbg\") pod \"bd2c18de-3f41-4c59-b400-d96f39d28ec2\" (UID: \"bd2c18de-3f41-4c59-b400-d96f39d28ec2\") " Dec 08 21:43:10 crc kubenswrapper[4791]: I1208 21:43:10.443827 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd2c18de-3f41-4c59-b400-d96f39d28ec2-kube-api-access-5gkbg" (OuterVolumeSpecName: "kube-api-access-5gkbg") pod "bd2c18de-3f41-4c59-b400-d96f39d28ec2" (UID: "bd2c18de-3f41-4c59-b400-d96f39d28ec2"). InnerVolumeSpecName "kube-api-access-5gkbg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:43:10 crc kubenswrapper[4791]: I1208 21:43:10.476348 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd2c18de-3f41-4c59-b400-d96f39d28ec2-config" (OuterVolumeSpecName: "config") pod "bd2c18de-3f41-4c59-b400-d96f39d28ec2" (UID: "bd2c18de-3f41-4c59-b400-d96f39d28ec2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:10 crc kubenswrapper[4791]: I1208 21:43:10.479877 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd2c18de-3f41-4c59-b400-d96f39d28ec2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bd2c18de-3f41-4c59-b400-d96f39d28ec2" (UID: "bd2c18de-3f41-4c59-b400-d96f39d28ec2"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:10 crc kubenswrapper[4791]: I1208 21:43:10.540698 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/bd2c18de-3f41-4c59-b400-d96f39d28ec2-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:10 crc kubenswrapper[4791]: I1208 21:43:10.540760 4791 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd2c18de-3f41-4c59-b400-d96f39d28ec2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:10 crc kubenswrapper[4791]: I1208 21:43:10.540774 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5gkbg\" (UniqueName: \"kubernetes.io/projected/bd2c18de-3f41-4c59-b400-d96f39d28ec2-kube-api-access-5gkbg\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:10 crc kubenswrapper[4791]: I1208 21:43:10.634006 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-779pn" event={"ID":"bd2c18de-3f41-4c59-b400-d96f39d28ec2","Type":"ContainerDied","Data":"36eba8f340c164f219eb19f43a6906f552dca725b263c2866ccc07a2b3afe9c1"} Dec 08 21:43:10 crc kubenswrapper[4791]: I1208 21:43:10.634393 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="36eba8f340c164f219eb19f43a6906f552dca725b263c2866ccc07a2b3afe9c1" Dec 08 21:43:10 crc kubenswrapper[4791]: I1208 21:43:10.634454 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-779pn" Dec 08 21:43:10 crc kubenswrapper[4791]: I1208 21:43:10.655539 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-kfqgj" event={"ID":"4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e","Type":"ContainerStarted","Data":"907c61b885c94e5a7baf7b609f9e0b5e5c2b64a469c42d80901ad3a1f6464f98"} Dec 08 21:43:10 crc kubenswrapper[4791]: I1208 21:43:10.670052 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"477be1bd-91b3-46cb-ac8d-8e1bd8242066","Type":"ContainerStarted","Data":"1ba9a8d18a906fa1bb1117f2d09f67605b1719180197ad411a7ec95067441b16"} Dec 08 21:43:10 crc kubenswrapper[4791]: I1208 21:43:10.728464 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-bt6fn" event={"ID":"a6d980c9-5283-4755-9cb5-5d86ed36edcf","Type":"ContainerStarted","Data":"8d29b4dffdccc90ca88f0c76136906a7a9d4109c008fb7bc94d6bffa7d722ef7"} Dec 08 21:43:10 crc kubenswrapper[4791]: I1208 21:43:10.729047 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-bt6fn" event={"ID":"a6d980c9-5283-4755-9cb5-5d86ed36edcf","Type":"ContainerStarted","Data":"1224ba43227a9c4701e260c3053007da9717731a80404804d7a23754d1121685"} Dec 08 21:43:10 crc kubenswrapper[4791]: I1208 21:43:10.738313 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5","Type":"ContainerStarted","Data":"d1efd7b7dac755b189e044b492a51022ae88d1005d04133b560c91488aa64709"} Dec 08 21:43:10 crc kubenswrapper[4791]: I1208 21:43:10.774648 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-db-sync-kfqgj" podStartSLOduration=4.122968808 podStartE2EDuration="34.774624672s" podCreationTimestamp="2025-12-08 21:42:36 +0000 UTC" firstStartedPulling="2025-12-08 21:42:38.356576071 +0000 UTC m=+1435.055334406" lastFinishedPulling="2025-12-08 21:43:09.008231925 +0000 UTC 
m=+1465.706990270" observedRunningTime="2025-12-08 21:43:10.705472992 +0000 UTC m=+1467.404231347" watchObservedRunningTime="2025-12-08 21:43:10.774624672 +0000 UTC m=+1467.473383017" Dec 08 21:43:10 crc kubenswrapper[4791]: I1208 21:43:10.801116 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5c79d794d7-m7h8z" podUID="aaf43b8d-8ee8-4def-b8c3-8f5596513b2f" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.154:5353: i/o timeout" Dec 08 21:43:10 crc kubenswrapper[4791]: I1208 21:43:10.820105 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-bt6fn" podStartSLOduration=12.82007682 podStartE2EDuration="12.82007682s" podCreationTimestamp="2025-12-08 21:42:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:43:10.768525422 +0000 UTC m=+1467.467283767" watchObservedRunningTime="2025-12-08 21:43:10.82007682 +0000 UTC m=+1467.518835175" Dec 08 21:43:10 crc kubenswrapper[4791]: I1208 21:43:10.878908 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-qs5ms"] Dec 08 21:43:10 crc kubenswrapper[4791]: E1208 21:43:10.880036 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd2c18de-3f41-4c59-b400-d96f39d28ec2" containerName="neutron-db-sync" Dec 08 21:43:10 crc kubenswrapper[4791]: I1208 21:43:10.880066 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd2c18de-3f41-4c59-b400-d96f39d28ec2" containerName="neutron-db-sync" Dec 08 21:43:10 crc kubenswrapper[4791]: I1208 21:43:10.880513 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd2c18de-3f41-4c59-b400-d96f39d28ec2" containerName="neutron-db-sync" Dec 08 21:43:10 crc kubenswrapper[4791]: I1208 21:43:10.883348 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6b7b667979-qs5ms" Dec 08 21:43:10 crc kubenswrapper[4791]: I1208 21:43:10.885871 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-qs5ms"] Dec 08 21:43:10 crc kubenswrapper[4791]: I1208 21:43:10.952005 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d442da86-b34f-479a-95ac-71368a15d3f9-ovsdbserver-sb\") pod \"dnsmasq-dns-6b7b667979-qs5ms\" (UID: \"d442da86-b34f-479a-95ac-71368a15d3f9\") " pod="openstack/dnsmasq-dns-6b7b667979-qs5ms" Dec 08 21:43:10 crc kubenswrapper[4791]: I1208 21:43:10.952093 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzmcb\" (UniqueName: \"kubernetes.io/projected/d442da86-b34f-479a-95ac-71368a15d3f9-kube-api-access-pzmcb\") pod \"dnsmasq-dns-6b7b667979-qs5ms\" (UID: \"d442da86-b34f-479a-95ac-71368a15d3f9\") " pod="openstack/dnsmasq-dns-6b7b667979-qs5ms" Dec 08 21:43:10 crc kubenswrapper[4791]: I1208 21:43:10.960747 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d442da86-b34f-479a-95ac-71368a15d3f9-dns-swift-storage-0\") pod \"dnsmasq-dns-6b7b667979-qs5ms\" (UID: \"d442da86-b34f-479a-95ac-71368a15d3f9\") " pod="openstack/dnsmasq-dns-6b7b667979-qs5ms" Dec 08 21:43:10 crc kubenswrapper[4791]: I1208 21:43:10.960831 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d442da86-b34f-479a-95ac-71368a15d3f9-config\") pod \"dnsmasq-dns-6b7b667979-qs5ms\" (UID: \"d442da86-b34f-479a-95ac-71368a15d3f9\") " pod="openstack/dnsmasq-dns-6b7b667979-qs5ms" Dec 08 21:43:10 crc kubenswrapper[4791]: I1208 21:43:10.960888 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d442da86-b34f-479a-95ac-71368a15d3f9-dns-svc\") pod \"dnsmasq-dns-6b7b667979-qs5ms\" (UID: \"d442da86-b34f-479a-95ac-71368a15d3f9\") " pod="openstack/dnsmasq-dns-6b7b667979-qs5ms" Dec 08 21:43:10 crc kubenswrapper[4791]: I1208 21:43:10.960997 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d442da86-b34f-479a-95ac-71368a15d3f9-ovsdbserver-nb\") pod \"dnsmasq-dns-6b7b667979-qs5ms\" (UID: \"d442da86-b34f-479a-95ac-71368a15d3f9\") " pod="openstack/dnsmasq-dns-6b7b667979-qs5ms" Dec 08 21:43:10 crc kubenswrapper[4791]: I1208 21:43:10.969320 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-5cd74df676-2lb7z"] Dec 08 21:43:10 crc kubenswrapper[4791]: I1208 21:43:10.971292 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5cd74df676-2lb7z" Dec 08 21:43:10 crc kubenswrapper[4791]: I1208 21:43:10.973133 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-24g9s" Dec 08 21:43:10 crc kubenswrapper[4791]: I1208 21:43:10.981878 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 08 21:43:10 crc kubenswrapper[4791]: I1208 21:43:10.982370 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 08 21:43:10 crc kubenswrapper[4791]: I1208 21:43:10.983141 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Dec 08 21:43:10 crc kubenswrapper[4791]: I1208 21:43:10.983037 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5cd74df676-2lb7z"] Dec 08 21:43:11 crc kubenswrapper[4791]: I1208 21:43:11.067100 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d752bdcc-7c7f-4b73-9052-02a6495248d8-ovndb-tls-certs\") pod \"neutron-5cd74df676-2lb7z\" (UID: \"d752bdcc-7c7f-4b73-9052-02a6495248d8\") " pod="openstack/neutron-5cd74df676-2lb7z" Dec 08 21:43:11 crc kubenswrapper[4791]: I1208 21:43:11.067410 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d442da86-b34f-479a-95ac-71368a15d3f9-dns-swift-storage-0\") pod \"dnsmasq-dns-6b7b667979-qs5ms\" (UID: \"d442da86-b34f-479a-95ac-71368a15d3f9\") " pod="openstack/dnsmasq-dns-6b7b667979-qs5ms" Dec 08 21:43:11 crc kubenswrapper[4791]: I1208 21:43:11.067443 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d442da86-b34f-479a-95ac-71368a15d3f9-config\") pod \"dnsmasq-dns-6b7b667979-qs5ms\" (UID: \"d442da86-b34f-479a-95ac-71368a15d3f9\") " pod="openstack/dnsmasq-dns-6b7b667979-qs5ms" Dec 08 21:43:11 crc kubenswrapper[4791]: I1208 21:43:11.067464 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d752bdcc-7c7f-4b73-9052-02a6495248d8-combined-ca-bundle\") pod \"neutron-5cd74df676-2lb7z\" (UID: \"d752bdcc-7c7f-4b73-9052-02a6495248d8\") " pod="openstack/neutron-5cd74df676-2lb7z" Dec 08 21:43:11 crc kubenswrapper[4791]: I1208 21:43:11.067495 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d442da86-b34f-479a-95ac-71368a15d3f9-dns-svc\") pod \"dnsmasq-dns-6b7b667979-qs5ms\" (UID: \"d442da86-b34f-479a-95ac-71368a15d3f9\") " pod="openstack/dnsmasq-dns-6b7b667979-qs5ms" Dec 08 21:43:11 crc kubenswrapper[4791]: I1208 21:43:11.067539 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d442da86-b34f-479a-95ac-71368a15d3f9-ovsdbserver-nb\") pod \"dnsmasq-dns-6b7b667979-qs5ms\" (UID: \"d442da86-b34f-479a-95ac-71368a15d3f9\") " pod="openstack/dnsmasq-dns-6b7b667979-qs5ms" Dec 08 21:43:11 crc kubenswrapper[4791]: I1208 21:43:11.067564 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d752bdcc-7c7f-4b73-9052-02a6495248d8-config\") pod \"neutron-5cd74df676-2lb7z\" (UID: 
\"d752bdcc-7c7f-4b73-9052-02a6495248d8\") " pod="openstack/neutron-5cd74df676-2lb7z" Dec 08 21:43:11 crc kubenswrapper[4791]: I1208 21:43:11.067596 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d442da86-b34f-479a-95ac-71368a15d3f9-ovsdbserver-sb\") pod \"dnsmasq-dns-6b7b667979-qs5ms\" (UID: \"d442da86-b34f-479a-95ac-71368a15d3f9\") " pod="openstack/dnsmasq-dns-6b7b667979-qs5ms" Dec 08 21:43:11 crc kubenswrapper[4791]: I1208 21:43:11.067613 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2552t\" (UniqueName: \"kubernetes.io/projected/d752bdcc-7c7f-4b73-9052-02a6495248d8-kube-api-access-2552t\") pod \"neutron-5cd74df676-2lb7z\" (UID: \"d752bdcc-7c7f-4b73-9052-02a6495248d8\") " pod="openstack/neutron-5cd74df676-2lb7z" Dec 08 21:43:11 crc kubenswrapper[4791]: I1208 21:43:11.067633 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzmcb\" (UniqueName: \"kubernetes.io/projected/d442da86-b34f-479a-95ac-71368a15d3f9-kube-api-access-pzmcb\") pod \"dnsmasq-dns-6b7b667979-qs5ms\" (UID: \"d442da86-b34f-479a-95ac-71368a15d3f9\") " pod="openstack/dnsmasq-dns-6b7b667979-qs5ms" Dec 08 21:43:11 crc kubenswrapper[4791]: I1208 21:43:11.067678 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/d752bdcc-7c7f-4b73-9052-02a6495248d8-httpd-config\") pod \"neutron-5cd74df676-2lb7z\" (UID: \"d752bdcc-7c7f-4b73-9052-02a6495248d8\") " pod="openstack/neutron-5cd74df676-2lb7z" Dec 08 21:43:11 crc kubenswrapper[4791]: I1208 21:43:11.068848 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d442da86-b34f-479a-95ac-71368a15d3f9-dns-swift-storage-0\") pod \"dnsmasq-dns-6b7b667979-qs5ms\" (UID: \"d442da86-b34f-479a-95ac-71368a15d3f9\") " pod="openstack/dnsmasq-dns-6b7b667979-qs5ms" Dec 08 21:43:11 crc kubenswrapper[4791]: I1208 21:43:11.068964 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d442da86-b34f-479a-95ac-71368a15d3f9-ovsdbserver-sb\") pod \"dnsmasq-dns-6b7b667979-qs5ms\" (UID: \"d442da86-b34f-479a-95ac-71368a15d3f9\") " pod="openstack/dnsmasq-dns-6b7b667979-qs5ms" Dec 08 21:43:11 crc kubenswrapper[4791]: I1208 21:43:11.069037 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d442da86-b34f-479a-95ac-71368a15d3f9-config\") pod \"dnsmasq-dns-6b7b667979-qs5ms\" (UID: \"d442da86-b34f-479a-95ac-71368a15d3f9\") " pod="openstack/dnsmasq-dns-6b7b667979-qs5ms" Dec 08 21:43:11 crc kubenswrapper[4791]: I1208 21:43:11.070117 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d442da86-b34f-479a-95ac-71368a15d3f9-ovsdbserver-nb\") pod \"dnsmasq-dns-6b7b667979-qs5ms\" (UID: \"d442da86-b34f-479a-95ac-71368a15d3f9\") " pod="openstack/dnsmasq-dns-6b7b667979-qs5ms" Dec 08 21:43:11 crc kubenswrapper[4791]: I1208 21:43:11.070294 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d442da86-b34f-479a-95ac-71368a15d3f9-dns-svc\") pod \"dnsmasq-dns-6b7b667979-qs5ms\" (UID: \"d442da86-b34f-479a-95ac-71368a15d3f9\") " 
pod="openstack/dnsmasq-dns-6b7b667979-qs5ms" Dec 08 21:43:11 crc kubenswrapper[4791]: I1208 21:43:11.096771 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pzmcb\" (UniqueName: \"kubernetes.io/projected/d442da86-b34f-479a-95ac-71368a15d3f9-kube-api-access-pzmcb\") pod \"dnsmasq-dns-6b7b667979-qs5ms\" (UID: \"d442da86-b34f-479a-95ac-71368a15d3f9\") " pod="openstack/dnsmasq-dns-6b7b667979-qs5ms" Dec 08 21:43:11 crc kubenswrapper[4791]: I1208 21:43:11.171093 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d752bdcc-7c7f-4b73-9052-02a6495248d8-config\") pod \"neutron-5cd74df676-2lb7z\" (UID: \"d752bdcc-7c7f-4b73-9052-02a6495248d8\") " pod="openstack/neutron-5cd74df676-2lb7z" Dec 08 21:43:11 crc kubenswrapper[4791]: I1208 21:43:11.171240 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2552t\" (UniqueName: \"kubernetes.io/projected/d752bdcc-7c7f-4b73-9052-02a6495248d8-kube-api-access-2552t\") pod \"neutron-5cd74df676-2lb7z\" (UID: \"d752bdcc-7c7f-4b73-9052-02a6495248d8\") " pod="openstack/neutron-5cd74df676-2lb7z" Dec 08 21:43:11 crc kubenswrapper[4791]: I1208 21:43:11.171396 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/d752bdcc-7c7f-4b73-9052-02a6495248d8-httpd-config\") pod \"neutron-5cd74df676-2lb7z\" (UID: \"d752bdcc-7c7f-4b73-9052-02a6495248d8\") " pod="openstack/neutron-5cd74df676-2lb7z" Dec 08 21:43:11 crc kubenswrapper[4791]: I1208 21:43:11.171657 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d752bdcc-7c7f-4b73-9052-02a6495248d8-ovndb-tls-certs\") pod \"neutron-5cd74df676-2lb7z\" (UID: \"d752bdcc-7c7f-4b73-9052-02a6495248d8\") " pod="openstack/neutron-5cd74df676-2lb7z" Dec 08 21:43:11 crc kubenswrapper[4791]: I1208 21:43:11.171748 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d752bdcc-7c7f-4b73-9052-02a6495248d8-combined-ca-bundle\") pod \"neutron-5cd74df676-2lb7z\" (UID: \"d752bdcc-7c7f-4b73-9052-02a6495248d8\") " pod="openstack/neutron-5cd74df676-2lb7z" Dec 08 21:43:11 crc kubenswrapper[4791]: I1208 21:43:11.178205 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/d752bdcc-7c7f-4b73-9052-02a6495248d8-config\") pod \"neutron-5cd74df676-2lb7z\" (UID: \"d752bdcc-7c7f-4b73-9052-02a6495248d8\") " pod="openstack/neutron-5cd74df676-2lb7z" Dec 08 21:43:11 crc kubenswrapper[4791]: I1208 21:43:11.182515 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/d752bdcc-7c7f-4b73-9052-02a6495248d8-httpd-config\") pod \"neutron-5cd74df676-2lb7z\" (UID: \"d752bdcc-7c7f-4b73-9052-02a6495248d8\") " pod="openstack/neutron-5cd74df676-2lb7z" Dec 08 21:43:11 crc kubenswrapper[4791]: I1208 21:43:11.182907 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d752bdcc-7c7f-4b73-9052-02a6495248d8-combined-ca-bundle\") pod \"neutron-5cd74df676-2lb7z\" (UID: \"d752bdcc-7c7f-4b73-9052-02a6495248d8\") " pod="openstack/neutron-5cd74df676-2lb7z" Dec 08 21:43:11 crc kubenswrapper[4791]: I1208 21:43:11.184614 4791 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d752bdcc-7c7f-4b73-9052-02a6495248d8-ovndb-tls-certs\") pod \"neutron-5cd74df676-2lb7z\" (UID: \"d752bdcc-7c7f-4b73-9052-02a6495248d8\") " pod="openstack/neutron-5cd74df676-2lb7z" Dec 08 21:43:11 crc kubenswrapper[4791]: I1208 21:43:11.195671 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2552t\" (UniqueName: \"kubernetes.io/projected/d752bdcc-7c7f-4b73-9052-02a6495248d8-kube-api-access-2552t\") pod \"neutron-5cd74df676-2lb7z\" (UID: \"d752bdcc-7c7f-4b73-9052-02a6495248d8\") " pod="openstack/neutron-5cd74df676-2lb7z" Dec 08 21:43:11 crc kubenswrapper[4791]: I1208 21:43:11.242569 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b7b667979-qs5ms" Dec 08 21:43:11 crc kubenswrapper[4791]: I1208 21:43:11.317433 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5cd74df676-2lb7z" Dec 08 21:43:11 crc kubenswrapper[4791]: I1208 21:43:11.849322 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5","Type":"ContainerStarted","Data":"f66e81e77504abfaa628991fd673001b373e18a3f5362cfe5dafd86976494760"} Dec 08 21:43:11 crc kubenswrapper[4791]: I1208 21:43:11.859065 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-qs5ms"] Dec 08 21:43:11 crc kubenswrapper[4791]: I1208 21:43:11.862557 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"477be1bd-91b3-46cb-ac8d-8e1bd8242066","Type":"ContainerStarted","Data":"b76ecd3a1ebf375f740d54b66e488f64605f0612f4fde44c2a1ad904ceb0ff88"} Dec 08 21:43:12 crc kubenswrapper[4791]: I1208 21:43:12.319916 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5cd74df676-2lb7z"] Dec 08 21:43:12 crc kubenswrapper[4791]: W1208 21:43:12.334081 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd752bdcc_7c7f_4b73_9052_02a6495248d8.slice/crio-b38fa60d5d677e6d5deb9a62e7734d0f59f6b1d771718a07842760bc602c2ca4 WatchSource:0}: Error finding container b38fa60d5d677e6d5deb9a62e7734d0f59f6b1d771718a07842760bc602c2ca4: Status 404 returned error can't find the container with id b38fa60d5d677e6d5deb9a62e7734d0f59f6b1d771718a07842760bc602c2ca4 Dec 08 21:43:12 crc kubenswrapper[4791]: I1208 21:43:12.907120 4791 generic.go:334] "Generic (PLEG): container finished" podID="d442da86-b34f-479a-95ac-71368a15d3f9" containerID="0c64f70188c18c717bc664ed05a6c3a67ef3c17b6cf7d49731ddcb785e92fe2d" exitCode=0 Dec 08 21:43:12 crc kubenswrapper[4791]: I1208 21:43:12.907270 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7b667979-qs5ms" event={"ID":"d442da86-b34f-479a-95ac-71368a15d3f9","Type":"ContainerDied","Data":"0c64f70188c18c717bc664ed05a6c3a67ef3c17b6cf7d49731ddcb785e92fe2d"} Dec 08 21:43:12 crc kubenswrapper[4791]: I1208 21:43:12.907594 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7b667979-qs5ms" event={"ID":"d442da86-b34f-479a-95ac-71368a15d3f9","Type":"ContainerStarted","Data":"60af05e3c97541c558dbb3bc842e3cc3a9254fe592674e0235258556438e4142"} Dec 08 21:43:12 crc kubenswrapper[4791]: I1208 21:43:12.934668 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" 
event={"ID":"477be1bd-91b3-46cb-ac8d-8e1bd8242066","Type":"ContainerStarted","Data":"f05edc8ce19b5afe71859980edb82c0121f1e4915bad8def8ab725c39e8e82fa"} Dec 08 21:43:12 crc kubenswrapper[4791]: I1208 21:43:12.941219 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5","Type":"ContainerStarted","Data":"3e241ea4a4640b677ffaf1eabb89c7d4e67a0afd46c80c93461f1faabc36f111"} Dec 08 21:43:12 crc kubenswrapper[4791]: I1208 21:43:12.950228 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5cd74df676-2lb7z" event={"ID":"d752bdcc-7c7f-4b73-9052-02a6495248d8","Type":"ContainerStarted","Data":"77ed06f8bfcec88de46e9e7bc3a7d244ea8997488ff0fa1e9321ec155412c494"} Dec 08 21:43:12 crc kubenswrapper[4791]: I1208 21:43:12.950288 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5cd74df676-2lb7z" event={"ID":"d752bdcc-7c7f-4b73-9052-02a6495248d8","Type":"ContainerStarted","Data":"b38fa60d5d677e6d5deb9a62e7734d0f59f6b1d771718a07842760bc602c2ca4"} Dec 08 21:43:13 crc kubenswrapper[4791]: I1208 21:43:13.002838 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=15.002807052 podStartE2EDuration="15.002807052s" podCreationTimestamp="2025-12-08 21:42:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:43:12.965162896 +0000 UTC m=+1469.663921241" watchObservedRunningTime="2025-12-08 21:43:13.002807052 +0000 UTC m=+1469.701565397" Dec 08 21:43:13 crc kubenswrapper[4791]: I1208 21:43:13.009013 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=5.008994254 podStartE2EDuration="5.008994254s" podCreationTimestamp="2025-12-08 21:43:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:43:12.990574701 +0000 UTC m=+1469.689333046" watchObservedRunningTime="2025-12-08 21:43:13.008994254 +0000 UTC m=+1469.707752599" Dec 08 21:43:13 crc kubenswrapper[4791]: I1208 21:43:13.358194 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-947b647f-l42kf"] Dec 08 21:43:13 crc kubenswrapper[4791]: I1208 21:43:13.360693 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-947b647f-l42kf" Dec 08 21:43:13 crc kubenswrapper[4791]: I1208 21:43:13.365027 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Dec 08 21:43:13 crc kubenswrapper[4791]: I1208 21:43:13.365231 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Dec 08 21:43:13 crc kubenswrapper[4791]: I1208 21:43:13.371664 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-947b647f-l42kf"] Dec 08 21:43:13 crc kubenswrapper[4791]: I1208 21:43:13.535855 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e54b9bb-f636-476f-8285-01dc712110d5-combined-ca-bundle\") pod \"neutron-947b647f-l42kf\" (UID: \"9e54b9bb-f636-476f-8285-01dc712110d5\") " pod="openstack/neutron-947b647f-l42kf" Dec 08 21:43:13 crc kubenswrapper[4791]: I1208 21:43:13.536184 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tjmjq\" (UniqueName: \"kubernetes.io/projected/9e54b9bb-f636-476f-8285-01dc712110d5-kube-api-access-tjmjq\") pod \"neutron-947b647f-l42kf\" (UID: \"9e54b9bb-f636-476f-8285-01dc712110d5\") " pod="openstack/neutron-947b647f-l42kf" Dec 08 21:43:13 crc kubenswrapper[4791]: I1208 21:43:13.536388 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e54b9bb-f636-476f-8285-01dc712110d5-ovndb-tls-certs\") pod \"neutron-947b647f-l42kf\" (UID: \"9e54b9bb-f636-476f-8285-01dc712110d5\") " pod="openstack/neutron-947b647f-l42kf" Dec 08 21:43:13 crc kubenswrapper[4791]: I1208 21:43:13.536458 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/9e54b9bb-f636-476f-8285-01dc712110d5-httpd-config\") pod \"neutron-947b647f-l42kf\" (UID: \"9e54b9bb-f636-476f-8285-01dc712110d5\") " pod="openstack/neutron-947b647f-l42kf" Dec 08 21:43:13 crc kubenswrapper[4791]: I1208 21:43:13.536495 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/9e54b9bb-f636-476f-8285-01dc712110d5-config\") pod \"neutron-947b647f-l42kf\" (UID: \"9e54b9bb-f636-476f-8285-01dc712110d5\") " pod="openstack/neutron-947b647f-l42kf" Dec 08 21:43:13 crc kubenswrapper[4791]: I1208 21:43:13.536814 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e54b9bb-f636-476f-8285-01dc712110d5-internal-tls-certs\") pod \"neutron-947b647f-l42kf\" (UID: \"9e54b9bb-f636-476f-8285-01dc712110d5\") " pod="openstack/neutron-947b647f-l42kf" Dec 08 21:43:13 crc kubenswrapper[4791]: I1208 21:43:13.536882 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e54b9bb-f636-476f-8285-01dc712110d5-public-tls-certs\") pod \"neutron-947b647f-l42kf\" (UID: \"9e54b9bb-f636-476f-8285-01dc712110d5\") " pod="openstack/neutron-947b647f-l42kf" Dec 08 21:43:13 crc kubenswrapper[4791]: I1208 21:43:13.639300 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/9e54b9bb-f636-476f-8285-01dc712110d5-internal-tls-certs\") pod \"neutron-947b647f-l42kf\" (UID: \"9e54b9bb-f636-476f-8285-01dc712110d5\") " pod="openstack/neutron-947b647f-l42kf" Dec 08 21:43:13 crc kubenswrapper[4791]: I1208 21:43:13.639659 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e54b9bb-f636-476f-8285-01dc712110d5-public-tls-certs\") pod \"neutron-947b647f-l42kf\" (UID: \"9e54b9bb-f636-476f-8285-01dc712110d5\") " pod="openstack/neutron-947b647f-l42kf" Dec 08 21:43:13 crc kubenswrapper[4791]: I1208 21:43:13.639693 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e54b9bb-f636-476f-8285-01dc712110d5-combined-ca-bundle\") pod \"neutron-947b647f-l42kf\" (UID: \"9e54b9bb-f636-476f-8285-01dc712110d5\") " pod="openstack/neutron-947b647f-l42kf" Dec 08 21:43:13 crc kubenswrapper[4791]: I1208 21:43:13.639846 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tjmjq\" (UniqueName: \"kubernetes.io/projected/9e54b9bb-f636-476f-8285-01dc712110d5-kube-api-access-tjmjq\") pod \"neutron-947b647f-l42kf\" (UID: \"9e54b9bb-f636-476f-8285-01dc712110d5\") " pod="openstack/neutron-947b647f-l42kf" Dec 08 21:43:13 crc kubenswrapper[4791]: I1208 21:43:13.639909 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e54b9bb-f636-476f-8285-01dc712110d5-ovndb-tls-certs\") pod \"neutron-947b647f-l42kf\" (UID: \"9e54b9bb-f636-476f-8285-01dc712110d5\") " pod="openstack/neutron-947b647f-l42kf" Dec 08 21:43:13 crc kubenswrapper[4791]: I1208 21:43:13.639942 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/9e54b9bb-f636-476f-8285-01dc712110d5-httpd-config\") pod \"neutron-947b647f-l42kf\" (UID: \"9e54b9bb-f636-476f-8285-01dc712110d5\") " pod="openstack/neutron-947b647f-l42kf" Dec 08 21:43:13 crc kubenswrapper[4791]: I1208 21:43:13.639970 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/9e54b9bb-f636-476f-8285-01dc712110d5-config\") pod \"neutron-947b647f-l42kf\" (UID: \"9e54b9bb-f636-476f-8285-01dc712110d5\") " pod="openstack/neutron-947b647f-l42kf" Dec 08 21:43:13 crc kubenswrapper[4791]: I1208 21:43:13.644047 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e54b9bb-f636-476f-8285-01dc712110d5-combined-ca-bundle\") pod \"neutron-947b647f-l42kf\" (UID: \"9e54b9bb-f636-476f-8285-01dc712110d5\") " pod="openstack/neutron-947b647f-l42kf" Dec 08 21:43:13 crc kubenswrapper[4791]: I1208 21:43:13.644169 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e54b9bb-f636-476f-8285-01dc712110d5-public-tls-certs\") pod \"neutron-947b647f-l42kf\" (UID: \"9e54b9bb-f636-476f-8285-01dc712110d5\") " pod="openstack/neutron-947b647f-l42kf" Dec 08 21:43:13 crc kubenswrapper[4791]: I1208 21:43:13.645053 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e54b9bb-f636-476f-8285-01dc712110d5-internal-tls-certs\") pod \"neutron-947b647f-l42kf\" (UID: \"9e54b9bb-f636-476f-8285-01dc712110d5\") " 
pod="openstack/neutron-947b647f-l42kf" Dec 08 21:43:13 crc kubenswrapper[4791]: I1208 21:43:13.645141 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e54b9bb-f636-476f-8285-01dc712110d5-ovndb-tls-certs\") pod \"neutron-947b647f-l42kf\" (UID: \"9e54b9bb-f636-476f-8285-01dc712110d5\") " pod="openstack/neutron-947b647f-l42kf" Dec 08 21:43:13 crc kubenswrapper[4791]: I1208 21:43:13.645354 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/9e54b9bb-f636-476f-8285-01dc712110d5-httpd-config\") pod \"neutron-947b647f-l42kf\" (UID: \"9e54b9bb-f636-476f-8285-01dc712110d5\") " pod="openstack/neutron-947b647f-l42kf" Dec 08 21:43:13 crc kubenswrapper[4791]: I1208 21:43:13.647987 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/9e54b9bb-f636-476f-8285-01dc712110d5-config\") pod \"neutron-947b647f-l42kf\" (UID: \"9e54b9bb-f636-476f-8285-01dc712110d5\") " pod="openstack/neutron-947b647f-l42kf" Dec 08 21:43:13 crc kubenswrapper[4791]: I1208 21:43:13.663958 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tjmjq\" (UniqueName: \"kubernetes.io/projected/9e54b9bb-f636-476f-8285-01dc712110d5-kube-api-access-tjmjq\") pod \"neutron-947b647f-l42kf\" (UID: \"9e54b9bb-f636-476f-8285-01dc712110d5\") " pod="openstack/neutron-947b647f-l42kf" Dec 08 21:43:13 crc kubenswrapper[4791]: I1208 21:43:13.678817 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-947b647f-l42kf" Dec 08 21:43:13 crc kubenswrapper[4791]: I1208 21:43:13.968863 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5cd74df676-2lb7z" event={"ID":"d752bdcc-7c7f-4b73-9052-02a6495248d8","Type":"ContainerStarted","Data":"5536c16bac48a9a56d408aa81894ad58d27d0443620da195ca65cc6940dce891"} Dec 08 21:43:13 crc kubenswrapper[4791]: I1208 21:43:13.969353 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-5cd74df676-2lb7z" Dec 08 21:43:13 crc kubenswrapper[4791]: I1208 21:43:13.971622 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7b667979-qs5ms" event={"ID":"d442da86-b34f-479a-95ac-71368a15d3f9","Type":"ContainerStarted","Data":"9417584f4336ff9687fae672a5c073284f152e15963f3ae46bd38e96d4cd4552"} Dec 08 21:43:14 crc kubenswrapper[4791]: I1208 21:43:14.007139 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-5cd74df676-2lb7z" podStartSLOduration=4.007116173 podStartE2EDuration="4.007116173s" podCreationTimestamp="2025-12-08 21:43:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:43:14.00130695 +0000 UTC m=+1470.700065295" watchObservedRunningTime="2025-12-08 21:43:14.007116173 +0000 UTC m=+1470.705874518" Dec 08 21:43:14 crc kubenswrapper[4791]: I1208 21:43:14.025787 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6b7b667979-qs5ms" podStartSLOduration=4.025763102 podStartE2EDuration="4.025763102s" podCreationTimestamp="2025-12-08 21:43:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:43:14.020306777 +0000 UTC m=+1470.719065132" 
watchObservedRunningTime="2025-12-08 21:43:14.025763102 +0000 UTC m=+1470.724521457" Dec 08 21:43:14 crc kubenswrapper[4791]: I1208 21:43:14.261932 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-947b647f-l42kf"] Dec 08 21:43:14 crc kubenswrapper[4791]: I1208 21:43:14.983294 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-947b647f-l42kf" event={"ID":"9e54b9bb-f636-476f-8285-01dc712110d5","Type":"ContainerStarted","Data":"cd1913a7596b0363f9d8296f193d948bddc209d986b49719ed7fccf6c11d2372"} Dec 08 21:43:14 crc kubenswrapper[4791]: I1208 21:43:14.983893 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-947b647f-l42kf" event={"ID":"9e54b9bb-f636-476f-8285-01dc712110d5","Type":"ContainerStarted","Data":"434ce2d29d220ac40c516906ad53c4c166c67a0aa632e4c181eefd5bf0f3528f"} Dec 08 21:43:14 crc kubenswrapper[4791]: I1208 21:43:14.983924 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-947b647f-l42kf" Dec 08 21:43:14 crc kubenswrapper[4791]: I1208 21:43:14.983939 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-947b647f-l42kf" event={"ID":"9e54b9bb-f636-476f-8285-01dc712110d5","Type":"ContainerStarted","Data":"b06953e1c3afc9f348cfb7287bbb87aa05b6e4c3a3ce9c35708849a8a70ae41c"} Dec 08 21:43:14 crc kubenswrapper[4791]: I1208 21:43:14.986152 4791 generic.go:334] "Generic (PLEG): container finished" podID="a6d980c9-5283-4755-9cb5-5d86ed36edcf" containerID="8d29b4dffdccc90ca88f0c76136906a7a9d4109c008fb7bc94d6bffa7d722ef7" exitCode=0 Dec 08 21:43:14 crc kubenswrapper[4791]: I1208 21:43:14.986225 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-bt6fn" event={"ID":"a6d980c9-5283-4755-9cb5-5d86ed36edcf","Type":"ContainerDied","Data":"8d29b4dffdccc90ca88f0c76136906a7a9d4109c008fb7bc94d6bffa7d722ef7"} Dec 08 21:43:14 crc kubenswrapper[4791]: I1208 21:43:14.988221 4791 generic.go:334] "Generic (PLEG): container finished" podID="242da563-f632-4ba3-be9e-bd7d0376120d" containerID="efbe1692614514a129d83ff55d87a7174a9446605c0dfa7b06ae2a46f9333723" exitCode=0 Dec 08 21:43:14 crc kubenswrapper[4791]: I1208 21:43:14.989485 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-hdkvl" event={"ID":"242da563-f632-4ba3-be9e-bd7d0376120d","Type":"ContainerDied","Data":"efbe1692614514a129d83ff55d87a7174a9446605c0dfa7b06ae2a46f9333723"} Dec 08 21:43:14 crc kubenswrapper[4791]: I1208 21:43:14.990127 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6b7b667979-qs5ms" Dec 08 21:43:15 crc kubenswrapper[4791]: I1208 21:43:15.039295 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-947b647f-l42kf" podStartSLOduration=2.039269668 podStartE2EDuration="2.039269668s" podCreationTimestamp="2025-12-08 21:43:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:43:15.030693907 +0000 UTC m=+1471.729452252" watchObservedRunningTime="2025-12-08 21:43:15.039269668 +0000 UTC m=+1471.738028013" Dec 08 21:43:16 crc kubenswrapper[4791]: I1208 21:43:16.004774 4791 generic.go:334] "Generic (PLEG): container finished" podID="4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e" containerID="907c61b885c94e5a7baf7b609f9e0b5e5c2b64a469c42d80901ad3a1f6464f98" exitCode=0 Dec 08 21:43:16 crc kubenswrapper[4791]: I1208 21:43:16.004842 4791 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-kfqgj" event={"ID":"4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e","Type":"ContainerDied","Data":"907c61b885c94e5a7baf7b609f9e0b5e5c2b64a469c42d80901ad3a1f6464f98"} Dec 08 21:43:16 crc kubenswrapper[4791]: I1208 21:43:16.577356 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-bt6fn" Dec 08 21:43:16 crc kubenswrapper[4791]: I1208 21:43:16.580643 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-hdkvl" Dec 08 21:43:16 crc kubenswrapper[4791]: I1208 21:43:16.710001 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/242da563-f632-4ba3-be9e-bd7d0376120d-combined-ca-bundle\") pod \"242da563-f632-4ba3-be9e-bd7d0376120d\" (UID: \"242da563-f632-4ba3-be9e-bd7d0376120d\") " Dec 08 21:43:16 crc kubenswrapper[4791]: I1208 21:43:16.710321 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/242da563-f632-4ba3-be9e-bd7d0376120d-logs\") pod \"242da563-f632-4ba3-be9e-bd7d0376120d\" (UID: \"242da563-f632-4ba3-be9e-bd7d0376120d\") " Dec 08 21:43:16 crc kubenswrapper[4791]: I1208 21:43:16.710388 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a6d980c9-5283-4755-9cb5-5d86ed36edcf-scripts\") pod \"a6d980c9-5283-4755-9cb5-5d86ed36edcf\" (UID: \"a6d980c9-5283-4755-9cb5-5d86ed36edcf\") " Dec 08 21:43:16 crc kubenswrapper[4791]: I1208 21:43:16.710445 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d769z\" (UniqueName: \"kubernetes.io/projected/a6d980c9-5283-4755-9cb5-5d86ed36edcf-kube-api-access-d769z\") pod \"a6d980c9-5283-4755-9cb5-5d86ed36edcf\" (UID: \"a6d980c9-5283-4755-9cb5-5d86ed36edcf\") " Dec 08 21:43:16 crc kubenswrapper[4791]: I1208 21:43:16.710482 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8gj7v\" (UniqueName: \"kubernetes.io/projected/242da563-f632-4ba3-be9e-bd7d0376120d-kube-api-access-8gj7v\") pod \"242da563-f632-4ba3-be9e-bd7d0376120d\" (UID: \"242da563-f632-4ba3-be9e-bd7d0376120d\") " Dec 08 21:43:16 crc kubenswrapper[4791]: I1208 21:43:16.710506 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/242da563-f632-4ba3-be9e-bd7d0376120d-config-data\") pod \"242da563-f632-4ba3-be9e-bd7d0376120d\" (UID: \"242da563-f632-4ba3-be9e-bd7d0376120d\") " Dec 08 21:43:16 crc kubenswrapper[4791]: I1208 21:43:16.710555 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6d980c9-5283-4755-9cb5-5d86ed36edcf-combined-ca-bundle\") pod \"a6d980c9-5283-4755-9cb5-5d86ed36edcf\" (UID: \"a6d980c9-5283-4755-9cb5-5d86ed36edcf\") " Dec 08 21:43:16 crc kubenswrapper[4791]: I1208 21:43:16.710574 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6d980c9-5283-4755-9cb5-5d86ed36edcf-config-data\") pod \"a6d980c9-5283-4755-9cb5-5d86ed36edcf\" (UID: \"a6d980c9-5283-4755-9cb5-5d86ed36edcf\") " Dec 08 21:43:16 crc kubenswrapper[4791]: I1208 21:43:16.710633 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"scripts\" (UniqueName: \"kubernetes.io/secret/242da563-f632-4ba3-be9e-bd7d0376120d-scripts\") pod \"242da563-f632-4ba3-be9e-bd7d0376120d\" (UID: \"242da563-f632-4ba3-be9e-bd7d0376120d\") " Dec 08 21:43:16 crc kubenswrapper[4791]: I1208 21:43:16.710690 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a6d980c9-5283-4755-9cb5-5d86ed36edcf-fernet-keys\") pod \"a6d980c9-5283-4755-9cb5-5d86ed36edcf\" (UID: \"a6d980c9-5283-4755-9cb5-5d86ed36edcf\") " Dec 08 21:43:16 crc kubenswrapper[4791]: I1208 21:43:16.710818 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a6d980c9-5283-4755-9cb5-5d86ed36edcf-credential-keys\") pod \"a6d980c9-5283-4755-9cb5-5d86ed36edcf\" (UID: \"a6d980c9-5283-4755-9cb5-5d86ed36edcf\") " Dec 08 21:43:16 crc kubenswrapper[4791]: I1208 21:43:16.710860 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/242da563-f632-4ba3-be9e-bd7d0376120d-logs" (OuterVolumeSpecName: "logs") pod "242da563-f632-4ba3-be9e-bd7d0376120d" (UID: "242da563-f632-4ba3-be9e-bd7d0376120d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:43:16 crc kubenswrapper[4791]: I1208 21:43:16.711322 4791 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/242da563-f632-4ba3-be9e-bd7d0376120d-logs\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:16 crc kubenswrapper[4791]: I1208 21:43:16.716151 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/242da563-f632-4ba3-be9e-bd7d0376120d-scripts" (OuterVolumeSpecName: "scripts") pod "242da563-f632-4ba3-be9e-bd7d0376120d" (UID: "242da563-f632-4ba3-be9e-bd7d0376120d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:16 crc kubenswrapper[4791]: I1208 21:43:16.716992 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a6d980c9-5283-4755-9cb5-5d86ed36edcf-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "a6d980c9-5283-4755-9cb5-5d86ed36edcf" (UID: "a6d980c9-5283-4755-9cb5-5d86ed36edcf"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:16 crc kubenswrapper[4791]: I1208 21:43:16.718931 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a6d980c9-5283-4755-9cb5-5d86ed36edcf-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "a6d980c9-5283-4755-9cb5-5d86ed36edcf" (UID: "a6d980c9-5283-4755-9cb5-5d86ed36edcf"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:16 crc kubenswrapper[4791]: I1208 21:43:16.720844 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/242da563-f632-4ba3-be9e-bd7d0376120d-kube-api-access-8gj7v" (OuterVolumeSpecName: "kube-api-access-8gj7v") pod "242da563-f632-4ba3-be9e-bd7d0376120d" (UID: "242da563-f632-4ba3-be9e-bd7d0376120d"). InnerVolumeSpecName "kube-api-access-8gj7v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:43:16 crc kubenswrapper[4791]: I1208 21:43:16.721877 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6d980c9-5283-4755-9cb5-5d86ed36edcf-kube-api-access-d769z" (OuterVolumeSpecName: "kube-api-access-d769z") pod "a6d980c9-5283-4755-9cb5-5d86ed36edcf" (UID: "a6d980c9-5283-4755-9cb5-5d86ed36edcf"). InnerVolumeSpecName "kube-api-access-d769z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:43:16 crc kubenswrapper[4791]: I1208 21:43:16.723032 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a6d980c9-5283-4755-9cb5-5d86ed36edcf-scripts" (OuterVolumeSpecName: "scripts") pod "a6d980c9-5283-4755-9cb5-5d86ed36edcf" (UID: "a6d980c9-5283-4755-9cb5-5d86ed36edcf"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:16 crc kubenswrapper[4791]: I1208 21:43:16.744771 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a6d980c9-5283-4755-9cb5-5d86ed36edcf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a6d980c9-5283-4755-9cb5-5d86ed36edcf" (UID: "a6d980c9-5283-4755-9cb5-5d86ed36edcf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:16 crc kubenswrapper[4791]: I1208 21:43:16.746967 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/242da563-f632-4ba3-be9e-bd7d0376120d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "242da563-f632-4ba3-be9e-bd7d0376120d" (UID: "242da563-f632-4ba3-be9e-bd7d0376120d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:16 crc kubenswrapper[4791]: I1208 21:43:16.750358 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/242da563-f632-4ba3-be9e-bd7d0376120d-config-data" (OuterVolumeSpecName: "config-data") pod "242da563-f632-4ba3-be9e-bd7d0376120d" (UID: "242da563-f632-4ba3-be9e-bd7d0376120d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:16 crc kubenswrapper[4791]: I1208 21:43:16.757236 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a6d980c9-5283-4755-9cb5-5d86ed36edcf-config-data" (OuterVolumeSpecName: "config-data") pod "a6d980c9-5283-4755-9cb5-5d86ed36edcf" (UID: "a6d980c9-5283-4755-9cb5-5d86ed36edcf"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:16 crc kubenswrapper[4791]: I1208 21:43:16.813764 4791 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/242da563-f632-4ba3-be9e-bd7d0376120d-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:16 crc kubenswrapper[4791]: I1208 21:43:16.813827 4791 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a6d980c9-5283-4755-9cb5-5d86ed36edcf-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:16 crc kubenswrapper[4791]: I1208 21:43:16.813837 4791 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a6d980c9-5283-4755-9cb5-5d86ed36edcf-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:16 crc kubenswrapper[4791]: I1208 21:43:16.813850 4791 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/242da563-f632-4ba3-be9e-bd7d0376120d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:16 crc kubenswrapper[4791]: I1208 21:43:16.813858 4791 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a6d980c9-5283-4755-9cb5-5d86ed36edcf-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:16 crc kubenswrapper[4791]: I1208 21:43:16.813866 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d769z\" (UniqueName: \"kubernetes.io/projected/a6d980c9-5283-4755-9cb5-5d86ed36edcf-kube-api-access-d769z\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:16 crc kubenswrapper[4791]: I1208 21:43:16.813875 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8gj7v\" (UniqueName: \"kubernetes.io/projected/242da563-f632-4ba3-be9e-bd7d0376120d-kube-api-access-8gj7v\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:16 crc kubenswrapper[4791]: I1208 21:43:16.813883 4791 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/242da563-f632-4ba3-be9e-bd7d0376120d-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:16 crc kubenswrapper[4791]: I1208 21:43:16.813890 4791 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6d980c9-5283-4755-9cb5-5d86ed36edcf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:16 crc kubenswrapper[4791]: I1208 21:43:16.813898 4791 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6d980c9-5283-4755-9cb5-5d86ed36edcf-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:17 crc kubenswrapper[4791]: I1208 21:43:17.020267 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-bt6fn" event={"ID":"a6d980c9-5283-4755-9cb5-5d86ed36edcf","Type":"ContainerDied","Data":"1224ba43227a9c4701e260c3053007da9717731a80404804d7a23754d1121685"} Dec 08 21:43:17 crc kubenswrapper[4791]: I1208 21:43:17.020370 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1224ba43227a9c4701e260c3053007da9717731a80404804d7a23754d1121685" Dec 08 21:43:17 crc kubenswrapper[4791]: I1208 21:43:17.020299 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-bt6fn" Dec 08 21:43:17 crc kubenswrapper[4791]: I1208 21:43:17.023256 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-hdkvl" event={"ID":"242da563-f632-4ba3-be9e-bd7d0376120d","Type":"ContainerDied","Data":"a00910334bd2ef97d82095a9fe0ba57c1ba1844e2983157a20bda02db6d6486a"} Dec 08 21:43:17 crc kubenswrapper[4791]: I1208 21:43:17.023289 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-hdkvl" Dec 08 21:43:17 crc kubenswrapper[4791]: I1208 21:43:17.023315 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a00910334bd2ef97d82095a9fe0ba57c1ba1844e2983157a20bda02db6d6486a" Dec 08 21:43:17 crc kubenswrapper[4791]: I1208 21:43:17.234548 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-76fcb88b6d-mffmn"] Dec 08 21:43:17 crc kubenswrapper[4791]: E1208 21:43:17.235307 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6d980c9-5283-4755-9cb5-5d86ed36edcf" containerName="keystone-bootstrap" Dec 08 21:43:17 crc kubenswrapper[4791]: I1208 21:43:17.235323 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6d980c9-5283-4755-9cb5-5d86ed36edcf" containerName="keystone-bootstrap" Dec 08 21:43:17 crc kubenswrapper[4791]: E1208 21:43:17.235368 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="242da563-f632-4ba3-be9e-bd7d0376120d" containerName="placement-db-sync" Dec 08 21:43:17 crc kubenswrapper[4791]: I1208 21:43:17.235375 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="242da563-f632-4ba3-be9e-bd7d0376120d" containerName="placement-db-sync" Dec 08 21:43:17 crc kubenswrapper[4791]: I1208 21:43:17.235583 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6d980c9-5283-4755-9cb5-5d86ed36edcf" containerName="keystone-bootstrap" Dec 08 21:43:17 crc kubenswrapper[4791]: I1208 21:43:17.235607 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="242da563-f632-4ba3-be9e-bd7d0376120d" containerName="placement-db-sync" Dec 08 21:43:17 crc kubenswrapper[4791]: I1208 21:43:17.239169 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-76fcb88b6d-mffmn" Dec 08 21:43:17 crc kubenswrapper[4791]: I1208 21:43:17.244205 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Dec 08 21:43:17 crc kubenswrapper[4791]: I1208 21:43:17.244205 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 08 21:43:17 crc kubenswrapper[4791]: I1208 21:43:17.244458 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Dec 08 21:43:17 crc kubenswrapper[4791]: I1208 21:43:17.244618 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 08 21:43:17 crc kubenswrapper[4791]: I1208 21:43:17.244787 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-kh27z" Dec 08 21:43:17 crc kubenswrapper[4791]: I1208 21:43:17.258968 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-76fcb88b6d-mffmn"] Dec 08 21:43:17 crc kubenswrapper[4791]: I1208 21:43:17.328754 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9458287a-1c73-47c0-8a35-a3b14ed39fab-logs\") pod \"placement-76fcb88b6d-mffmn\" (UID: \"9458287a-1c73-47c0-8a35-a3b14ed39fab\") " pod="openstack/placement-76fcb88b6d-mffmn" Dec 08 21:43:17 crc kubenswrapper[4791]: I1208 21:43:17.328887 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9458287a-1c73-47c0-8a35-a3b14ed39fab-internal-tls-certs\") pod \"placement-76fcb88b6d-mffmn\" (UID: \"9458287a-1c73-47c0-8a35-a3b14ed39fab\") " pod="openstack/placement-76fcb88b6d-mffmn" Dec 08 21:43:17 crc kubenswrapper[4791]: I1208 21:43:17.328946 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9458287a-1c73-47c0-8a35-a3b14ed39fab-scripts\") pod \"placement-76fcb88b6d-mffmn\" (UID: \"9458287a-1c73-47c0-8a35-a3b14ed39fab\") " pod="openstack/placement-76fcb88b6d-mffmn" Dec 08 21:43:17 crc kubenswrapper[4791]: I1208 21:43:17.329020 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9458287a-1c73-47c0-8a35-a3b14ed39fab-public-tls-certs\") pod \"placement-76fcb88b6d-mffmn\" (UID: \"9458287a-1c73-47c0-8a35-a3b14ed39fab\") " pod="openstack/placement-76fcb88b6d-mffmn" Dec 08 21:43:17 crc kubenswrapper[4791]: I1208 21:43:17.329052 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9458287a-1c73-47c0-8a35-a3b14ed39fab-config-data\") pod \"placement-76fcb88b6d-mffmn\" (UID: \"9458287a-1c73-47c0-8a35-a3b14ed39fab\") " pod="openstack/placement-76fcb88b6d-mffmn" Dec 08 21:43:17 crc kubenswrapper[4791]: I1208 21:43:17.329404 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9458287a-1c73-47c0-8a35-a3b14ed39fab-combined-ca-bundle\") pod \"placement-76fcb88b6d-mffmn\" (UID: \"9458287a-1c73-47c0-8a35-a3b14ed39fab\") " pod="openstack/placement-76fcb88b6d-mffmn" Dec 08 21:43:17 crc kubenswrapper[4791]: I1208 21:43:17.329641 4791 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxrnb\" (UniqueName: \"kubernetes.io/projected/9458287a-1c73-47c0-8a35-a3b14ed39fab-kube-api-access-hxrnb\") pod \"placement-76fcb88b6d-mffmn\" (UID: \"9458287a-1c73-47c0-8a35-a3b14ed39fab\") " pod="openstack/placement-76fcb88b6d-mffmn" Dec 08 21:43:17 crc kubenswrapper[4791]: I1208 21:43:17.431793 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9458287a-1c73-47c0-8a35-a3b14ed39fab-internal-tls-certs\") pod \"placement-76fcb88b6d-mffmn\" (UID: \"9458287a-1c73-47c0-8a35-a3b14ed39fab\") " pod="openstack/placement-76fcb88b6d-mffmn" Dec 08 21:43:17 crc kubenswrapper[4791]: I1208 21:43:17.431907 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9458287a-1c73-47c0-8a35-a3b14ed39fab-scripts\") pod \"placement-76fcb88b6d-mffmn\" (UID: \"9458287a-1c73-47c0-8a35-a3b14ed39fab\") " pod="openstack/placement-76fcb88b6d-mffmn" Dec 08 21:43:17 crc kubenswrapper[4791]: I1208 21:43:17.431950 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9458287a-1c73-47c0-8a35-a3b14ed39fab-public-tls-certs\") pod \"placement-76fcb88b6d-mffmn\" (UID: \"9458287a-1c73-47c0-8a35-a3b14ed39fab\") " pod="openstack/placement-76fcb88b6d-mffmn" Dec 08 21:43:17 crc kubenswrapper[4791]: I1208 21:43:17.431983 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9458287a-1c73-47c0-8a35-a3b14ed39fab-config-data\") pod \"placement-76fcb88b6d-mffmn\" (UID: \"9458287a-1c73-47c0-8a35-a3b14ed39fab\") " pod="openstack/placement-76fcb88b6d-mffmn" Dec 08 21:43:17 crc kubenswrapper[4791]: I1208 21:43:17.432128 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9458287a-1c73-47c0-8a35-a3b14ed39fab-combined-ca-bundle\") pod \"placement-76fcb88b6d-mffmn\" (UID: \"9458287a-1c73-47c0-8a35-a3b14ed39fab\") " pod="openstack/placement-76fcb88b6d-mffmn" Dec 08 21:43:17 crc kubenswrapper[4791]: I1208 21:43:17.432234 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hxrnb\" (UniqueName: \"kubernetes.io/projected/9458287a-1c73-47c0-8a35-a3b14ed39fab-kube-api-access-hxrnb\") pod \"placement-76fcb88b6d-mffmn\" (UID: \"9458287a-1c73-47c0-8a35-a3b14ed39fab\") " pod="openstack/placement-76fcb88b6d-mffmn" Dec 08 21:43:17 crc kubenswrapper[4791]: I1208 21:43:17.432303 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9458287a-1c73-47c0-8a35-a3b14ed39fab-logs\") pod \"placement-76fcb88b6d-mffmn\" (UID: \"9458287a-1c73-47c0-8a35-a3b14ed39fab\") " pod="openstack/placement-76fcb88b6d-mffmn" Dec 08 21:43:17 crc kubenswrapper[4791]: I1208 21:43:17.432921 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9458287a-1c73-47c0-8a35-a3b14ed39fab-logs\") pod \"placement-76fcb88b6d-mffmn\" (UID: \"9458287a-1c73-47c0-8a35-a3b14ed39fab\") " pod="openstack/placement-76fcb88b6d-mffmn" Dec 08 21:43:17 crc kubenswrapper[4791]: I1208 21:43:17.437183 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/9458287a-1c73-47c0-8a35-a3b14ed39fab-internal-tls-certs\") pod \"placement-76fcb88b6d-mffmn\" (UID: \"9458287a-1c73-47c0-8a35-a3b14ed39fab\") " pod="openstack/placement-76fcb88b6d-mffmn" Dec 08 21:43:17 crc kubenswrapper[4791]: I1208 21:43:17.437663 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9458287a-1c73-47c0-8a35-a3b14ed39fab-config-data\") pod \"placement-76fcb88b6d-mffmn\" (UID: \"9458287a-1c73-47c0-8a35-a3b14ed39fab\") " pod="openstack/placement-76fcb88b6d-mffmn" Dec 08 21:43:17 crc kubenswrapper[4791]: I1208 21:43:17.441347 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9458287a-1c73-47c0-8a35-a3b14ed39fab-public-tls-certs\") pod \"placement-76fcb88b6d-mffmn\" (UID: \"9458287a-1c73-47c0-8a35-a3b14ed39fab\") " pod="openstack/placement-76fcb88b6d-mffmn" Dec 08 21:43:17 crc kubenswrapper[4791]: I1208 21:43:17.442648 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9458287a-1c73-47c0-8a35-a3b14ed39fab-combined-ca-bundle\") pod \"placement-76fcb88b6d-mffmn\" (UID: \"9458287a-1c73-47c0-8a35-a3b14ed39fab\") " pod="openstack/placement-76fcb88b6d-mffmn" Dec 08 21:43:17 crc kubenswrapper[4791]: I1208 21:43:17.443145 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9458287a-1c73-47c0-8a35-a3b14ed39fab-scripts\") pod \"placement-76fcb88b6d-mffmn\" (UID: \"9458287a-1c73-47c0-8a35-a3b14ed39fab\") " pod="openstack/placement-76fcb88b6d-mffmn" Dec 08 21:43:17 crc kubenswrapper[4791]: I1208 21:43:17.452158 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hxrnb\" (UniqueName: \"kubernetes.io/projected/9458287a-1c73-47c0-8a35-a3b14ed39fab-kube-api-access-hxrnb\") pod \"placement-76fcb88b6d-mffmn\" (UID: \"9458287a-1c73-47c0-8a35-a3b14ed39fab\") " pod="openstack/placement-76fcb88b6d-mffmn" Dec 08 21:43:17 crc kubenswrapper[4791]: I1208 21:43:17.574327 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-76fcb88b6d-mffmn" Dec 08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.044549 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-5fd957fc96-br4ld"] Dec 08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.046392 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-5fd957fc96-br4ld" Dec 08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.050454 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.050622 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Dec 08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.050823 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.050963 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Dec 08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.051899 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-kwdwq" Dec 08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.052190 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.087028 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-5fd957fc96-br4ld"] Dec 08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.164617 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/abde564f-96c1-47f0-ab05-c8e54905668b-credential-keys\") pod \"keystone-5fd957fc96-br4ld\" (UID: \"abde564f-96c1-47f0-ab05-c8e54905668b\") " pod="openstack/keystone-5fd957fc96-br4ld" Dec 08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.164959 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/abde564f-96c1-47f0-ab05-c8e54905668b-fernet-keys\") pod \"keystone-5fd957fc96-br4ld\" (UID: \"abde564f-96c1-47f0-ab05-c8e54905668b\") " pod="openstack/keystone-5fd957fc96-br4ld" Dec 08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.165095 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/abde564f-96c1-47f0-ab05-c8e54905668b-public-tls-certs\") pod \"keystone-5fd957fc96-br4ld\" (UID: \"abde564f-96c1-47f0-ab05-c8e54905668b\") " pod="openstack/keystone-5fd957fc96-br4ld" Dec 08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.165221 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/abde564f-96c1-47f0-ab05-c8e54905668b-scripts\") pod \"keystone-5fd957fc96-br4ld\" (UID: \"abde564f-96c1-47f0-ab05-c8e54905668b\") " pod="openstack/keystone-5fd957fc96-br4ld" Dec 08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.165380 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-58m9z\" (UniqueName: \"kubernetes.io/projected/abde564f-96c1-47f0-ab05-c8e54905668b-kube-api-access-58m9z\") pod \"keystone-5fd957fc96-br4ld\" (UID: \"abde564f-96c1-47f0-ab05-c8e54905668b\") " pod="openstack/keystone-5fd957fc96-br4ld" Dec 08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.165511 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abde564f-96c1-47f0-ab05-c8e54905668b-combined-ca-bundle\") pod \"keystone-5fd957fc96-br4ld\" (UID: 
\"abde564f-96c1-47f0-ab05-c8e54905668b\") " pod="openstack/keystone-5fd957fc96-br4ld" Dec 08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.165619 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/abde564f-96c1-47f0-ab05-c8e54905668b-internal-tls-certs\") pod \"keystone-5fd957fc96-br4ld\" (UID: \"abde564f-96c1-47f0-ab05-c8e54905668b\") " pod="openstack/keystone-5fd957fc96-br4ld" Dec 08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.165748 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/abde564f-96c1-47f0-ab05-c8e54905668b-config-data\") pod \"keystone-5fd957fc96-br4ld\" (UID: \"abde564f-96c1-47f0-ab05-c8e54905668b\") " pod="openstack/keystone-5fd957fc96-br4ld" Dec 08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.270696 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/abde564f-96c1-47f0-ab05-c8e54905668b-config-data\") pod \"keystone-5fd957fc96-br4ld\" (UID: \"abde564f-96c1-47f0-ab05-c8e54905668b\") " pod="openstack/keystone-5fd957fc96-br4ld" Dec 08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.271344 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/abde564f-96c1-47f0-ab05-c8e54905668b-credential-keys\") pod \"keystone-5fd957fc96-br4ld\" (UID: \"abde564f-96c1-47f0-ab05-c8e54905668b\") " pod="openstack/keystone-5fd957fc96-br4ld" Dec 08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.271600 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/abde564f-96c1-47f0-ab05-c8e54905668b-fernet-keys\") pod \"keystone-5fd957fc96-br4ld\" (UID: \"abde564f-96c1-47f0-ab05-c8e54905668b\") " pod="openstack/keystone-5fd957fc96-br4ld" Dec 08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.271858 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/abde564f-96c1-47f0-ab05-c8e54905668b-public-tls-certs\") pod \"keystone-5fd957fc96-br4ld\" (UID: \"abde564f-96c1-47f0-ab05-c8e54905668b\") " pod="openstack/keystone-5fd957fc96-br4ld" Dec 08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.272038 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/abde564f-96c1-47f0-ab05-c8e54905668b-scripts\") pod \"keystone-5fd957fc96-br4ld\" (UID: \"abde564f-96c1-47f0-ab05-c8e54905668b\") " pod="openstack/keystone-5fd957fc96-br4ld" Dec 08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.272248 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-58m9z\" (UniqueName: \"kubernetes.io/projected/abde564f-96c1-47f0-ab05-c8e54905668b-kube-api-access-58m9z\") pod \"keystone-5fd957fc96-br4ld\" (UID: \"abde564f-96c1-47f0-ab05-c8e54905668b\") " pod="openstack/keystone-5fd957fc96-br4ld" Dec 08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.272368 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abde564f-96c1-47f0-ab05-c8e54905668b-combined-ca-bundle\") pod \"keystone-5fd957fc96-br4ld\" (UID: \"abde564f-96c1-47f0-ab05-c8e54905668b\") " pod="openstack/keystone-5fd957fc96-br4ld" Dec 
08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.272487 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/abde564f-96c1-47f0-ab05-c8e54905668b-internal-tls-certs\") pod \"keystone-5fd957fc96-br4ld\" (UID: \"abde564f-96c1-47f0-ab05-c8e54905668b\") " pod="openstack/keystone-5fd957fc96-br4ld" Dec 08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.278680 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/abde564f-96c1-47f0-ab05-c8e54905668b-public-tls-certs\") pod \"keystone-5fd957fc96-br4ld\" (UID: \"abde564f-96c1-47f0-ab05-c8e54905668b\") " pod="openstack/keystone-5fd957fc96-br4ld" Dec 08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.279385 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/abde564f-96c1-47f0-ab05-c8e54905668b-credential-keys\") pod \"keystone-5fd957fc96-br4ld\" (UID: \"abde564f-96c1-47f0-ab05-c8e54905668b\") " pod="openstack/keystone-5fd957fc96-br4ld" Dec 08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.280626 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/abde564f-96c1-47f0-ab05-c8e54905668b-config-data\") pod \"keystone-5fd957fc96-br4ld\" (UID: \"abde564f-96c1-47f0-ab05-c8e54905668b\") " pod="openstack/keystone-5fd957fc96-br4ld" Dec 08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.286295 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/abde564f-96c1-47f0-ab05-c8e54905668b-fernet-keys\") pod \"keystone-5fd957fc96-br4ld\" (UID: \"abde564f-96c1-47f0-ab05-c8e54905668b\") " pod="openstack/keystone-5fd957fc96-br4ld" Dec 08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.290408 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/abde564f-96c1-47f0-ab05-c8e54905668b-scripts\") pod \"keystone-5fd957fc96-br4ld\" (UID: \"abde564f-96c1-47f0-ab05-c8e54905668b\") " pod="openstack/keystone-5fd957fc96-br4ld" Dec 08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.291072 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/abde564f-96c1-47f0-ab05-c8e54905668b-internal-tls-certs\") pod \"keystone-5fd957fc96-br4ld\" (UID: \"abde564f-96c1-47f0-ab05-c8e54905668b\") " pod="openstack/keystone-5fd957fc96-br4ld" Dec 08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.297561 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abde564f-96c1-47f0-ab05-c8e54905668b-combined-ca-bundle\") pod \"keystone-5fd957fc96-br4ld\" (UID: \"abde564f-96c1-47f0-ab05-c8e54905668b\") " pod="openstack/keystone-5fd957fc96-br4ld" Dec 08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.301784 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-58m9z\" (UniqueName: \"kubernetes.io/projected/abde564f-96c1-47f0-ab05-c8e54905668b-kube-api-access-58m9z\") pod \"keystone-5fd957fc96-br4ld\" (UID: \"abde564f-96c1-47f0-ab05-c8e54905668b\") " pod="openstack/keystone-5fd957fc96-br4ld" Dec 08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.403972 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-sync-kfqgj" Dec 08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.480425 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-5fd957fc96-br4ld" Dec 08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.481948 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e-combined-ca-bundle\") pod \"4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e\" (UID: \"4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e\") " Dec 08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.482222 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e-config-data\") pod \"4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e\" (UID: \"4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e\") " Dec 08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.482415 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7dzpg\" (UniqueName: \"kubernetes.io/projected/4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e-kube-api-access-7dzpg\") pod \"4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e\" (UID: \"4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e\") " Dec 08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.524606 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e-kube-api-access-7dzpg" (OuterVolumeSpecName: "kube-api-access-7dzpg") pod "4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e" (UID: "4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e"). InnerVolumeSpecName "kube-api-access-7dzpg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.589633 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7dzpg\" (UniqueName: \"kubernetes.io/projected/4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e-kube-api-access-7dzpg\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.615760 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e" (UID: "4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.695271 4791 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.729974 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e-config-data" (OuterVolumeSpecName: "config-data") pod "4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e" (UID: "4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.758787 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-76fcb88b6d-mffmn"] Dec 08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.798051 4791 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.819871 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 08 21:43:18 crc kubenswrapper[4791]: I1208 21:43:18.819934 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 08 21:43:19 crc kubenswrapper[4791]: I1208 21:43:19.089883 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-76fcb88b6d-mffmn" event={"ID":"9458287a-1c73-47c0-8a35-a3b14ed39fab","Type":"ContainerStarted","Data":"5fd39098d767ec4f4deb4e16f8273f1596c338259caa54c3cdb86dd87dd2795a"} Dec 08 21:43:19 crc kubenswrapper[4791]: I1208 21:43:19.091329 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-kfqgj" event={"ID":"4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e","Type":"ContainerDied","Data":"745e27ac504a5c4c3d5a5f531469d4ba4667837ea90a0294f9ae31b72ef5595a"} Dec 08 21:43:19 crc kubenswrapper[4791]: I1208 21:43:19.091375 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="745e27ac504a5c4c3d5a5f531469d4ba4667837ea90a0294f9ae31b72ef5595a" Dec 08 21:43:19 crc kubenswrapper[4791]: I1208 21:43:19.091395 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-sync-kfqgj" Dec 08 21:43:19 crc kubenswrapper[4791]: I1208 21:43:19.116918 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 08 21:43:19 crc kubenswrapper[4791]: I1208 21:43:19.118817 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 08 21:43:19 crc kubenswrapper[4791]: I1208 21:43:19.119487 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 08 21:43:19 crc kubenswrapper[4791]: I1208 21:43:19.187865 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-5fd957fc96-br4ld"] Dec 08 21:43:19 crc kubenswrapper[4791]: W1208 21:43:19.189687 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podabde564f_96c1_47f0_ab05_c8e54905668b.slice/crio-4074d762cb6b5b8bf3f94fad899c0ab7755cf657fb54683c4ac0f6ee0c0c0851 WatchSource:0}: Error finding container 4074d762cb6b5b8bf3f94fad899c0ab7755cf657fb54683c4ac0f6ee0c0c0851: Status 404 returned error can't find the container with id 4074d762cb6b5b8bf3f94fad899c0ab7755cf657fb54683c4ac0f6ee0c0c0851 Dec 08 21:43:19 crc kubenswrapper[4791]: I1208 21:43:19.212945 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 08 21:43:19 crc kubenswrapper[4791]: I1208 21:43:19.213005 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 08 21:43:19 crc kubenswrapper[4791]: I1208 21:43:19.254520 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 08 21:43:19 crc kubenswrapper[4791]: I1208 21:43:19.283592 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 08 21:43:20 crc kubenswrapper[4791]: I1208 21:43:20.104032 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5fd957fc96-br4ld" event={"ID":"abde564f-96c1-47f0-ab05-c8e54905668b","Type":"ContainerStarted","Data":"4074d762cb6b5b8bf3f94fad899c0ab7755cf657fb54683c4ac0f6ee0c0c0851"} Dec 08 21:43:20 crc kubenswrapper[4791]: I1208 21:43:20.105144 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 08 21:43:20 crc kubenswrapper[4791]: I1208 21:43:20.105192 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 08 21:43:20 crc kubenswrapper[4791]: I1208 21:43:20.105208 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 08 21:43:21 crc kubenswrapper[4791]: I1208 21:43:21.124059 4791 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 08 21:43:21 crc kubenswrapper[4791]: I1208 21:43:21.245129 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6b7b667979-qs5ms" Dec 08 21:43:21 crc kubenswrapper[4791]: I1208 21:43:21.334818 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-xlwz8"] Dec 08 21:43:21 crc kubenswrapper[4791]: I1208 21:43:21.335097 4791 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/dnsmasq-dns-56df8fb6b7-xlwz8" podUID="74852b39-364a-40e3-8ed8-a24178ee0403" containerName="dnsmasq-dns" containerID="cri-o://c8ac760dcd8e06f76c056d88cb5383c05c088e0c91a71a268c06064ee5cc600b" gracePeriod=10 Dec 08 21:43:22 crc kubenswrapper[4791]: I1208 21:43:22.195331 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5fd957fc96-br4ld" event={"ID":"abde564f-96c1-47f0-ab05-c8e54905668b","Type":"ContainerStarted","Data":"c4f695205bbba335681523c49be361e475c67c5631308bfa5187309ab35557fc"} Dec 08 21:43:22 crc kubenswrapper[4791]: I1208 21:43:22.195411 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-5fd957fc96-br4ld" Dec 08 21:43:22 crc kubenswrapper[4791]: I1208 21:43:22.204124 4791 generic.go:334] "Generic (PLEG): container finished" podID="74852b39-364a-40e3-8ed8-a24178ee0403" containerID="c8ac760dcd8e06f76c056d88cb5383c05c088e0c91a71a268c06064ee5cc600b" exitCode=0 Dec 08 21:43:22 crc kubenswrapper[4791]: I1208 21:43:22.204458 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-xlwz8" event={"ID":"74852b39-364a-40e3-8ed8-a24178ee0403","Type":"ContainerDied","Data":"c8ac760dcd8e06f76c056d88cb5383c05c088e0c91a71a268c06064ee5cc600b"} Dec 08 21:43:22 crc kubenswrapper[4791]: I1208 21:43:22.212556 4791 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 08 21:43:22 crc kubenswrapper[4791]: I1208 21:43:22.212580 4791 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 08 21:43:22 crc kubenswrapper[4791]: I1208 21:43:22.213599 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-76fcb88b6d-mffmn" event={"ID":"9458287a-1c73-47c0-8a35-a3b14ed39fab","Type":"ContainerStarted","Data":"e5df8c52ec0a239b0d953402c7726a83c1f8bd78d6ffc06377dd562b62f7389e"} Dec 08 21:43:22 crc kubenswrapper[4791]: I1208 21:43:22.265788 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-5fd957fc96-br4ld" podStartSLOduration=4.265769542 podStartE2EDuration="4.265769542s" podCreationTimestamp="2025-12-08 21:43:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:43:22.234003241 +0000 UTC m=+1478.932761586" watchObservedRunningTime="2025-12-08 21:43:22.265769542 +0000 UTC m=+1478.964527887" Dec 08 21:43:22 crc kubenswrapper[4791]: I1208 21:43:22.447976 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-56df8fb6b7-xlwz8" Dec 08 21:43:22 crc kubenswrapper[4791]: I1208 21:43:22.506582 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/74852b39-364a-40e3-8ed8-a24178ee0403-config\") pod \"74852b39-364a-40e3-8ed8-a24178ee0403\" (UID: \"74852b39-364a-40e3-8ed8-a24178ee0403\") " Dec 08 21:43:22 crc kubenswrapper[4791]: I1208 21:43:22.506694 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/74852b39-364a-40e3-8ed8-a24178ee0403-dns-swift-storage-0\") pod \"74852b39-364a-40e3-8ed8-a24178ee0403\" (UID: \"74852b39-364a-40e3-8ed8-a24178ee0403\") " Dec 08 21:43:22 crc kubenswrapper[4791]: I1208 21:43:22.506795 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/74852b39-364a-40e3-8ed8-a24178ee0403-ovsdbserver-sb\") pod \"74852b39-364a-40e3-8ed8-a24178ee0403\" (UID: \"74852b39-364a-40e3-8ed8-a24178ee0403\") " Dec 08 21:43:22 crc kubenswrapper[4791]: I1208 21:43:22.506872 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/74852b39-364a-40e3-8ed8-a24178ee0403-dns-svc\") pod \"74852b39-364a-40e3-8ed8-a24178ee0403\" (UID: \"74852b39-364a-40e3-8ed8-a24178ee0403\") " Dec 08 21:43:22 crc kubenswrapper[4791]: I1208 21:43:22.506967 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m849h\" (UniqueName: \"kubernetes.io/projected/74852b39-364a-40e3-8ed8-a24178ee0403-kube-api-access-m849h\") pod \"74852b39-364a-40e3-8ed8-a24178ee0403\" (UID: \"74852b39-364a-40e3-8ed8-a24178ee0403\") " Dec 08 21:43:22 crc kubenswrapper[4791]: I1208 21:43:22.507108 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/74852b39-364a-40e3-8ed8-a24178ee0403-ovsdbserver-nb\") pod \"74852b39-364a-40e3-8ed8-a24178ee0403\" (UID: \"74852b39-364a-40e3-8ed8-a24178ee0403\") " Dec 08 21:43:22 crc kubenswrapper[4791]: I1208 21:43:22.533613 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74852b39-364a-40e3-8ed8-a24178ee0403-kube-api-access-m849h" (OuterVolumeSpecName: "kube-api-access-m849h") pod "74852b39-364a-40e3-8ed8-a24178ee0403" (UID: "74852b39-364a-40e3-8ed8-a24178ee0403"). InnerVolumeSpecName "kube-api-access-m849h". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:43:22 crc kubenswrapper[4791]: I1208 21:43:22.611355 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m849h\" (UniqueName: \"kubernetes.io/projected/74852b39-364a-40e3-8ed8-a24178ee0403-kube-api-access-m849h\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:22 crc kubenswrapper[4791]: I1208 21:43:22.779450 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74852b39-364a-40e3-8ed8-a24178ee0403-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "74852b39-364a-40e3-8ed8-a24178ee0403" (UID: "74852b39-364a-40e3-8ed8-a24178ee0403"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:43:22 crc kubenswrapper[4791]: I1208 21:43:22.781832 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 08 21:43:22 crc kubenswrapper[4791]: I1208 21:43:22.781945 4791 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 08 21:43:22 crc kubenswrapper[4791]: I1208 21:43:22.794499 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74852b39-364a-40e3-8ed8-a24178ee0403-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "74852b39-364a-40e3-8ed8-a24178ee0403" (UID: "74852b39-364a-40e3-8ed8-a24178ee0403"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:43:22 crc kubenswrapper[4791]: I1208 21:43:22.816943 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74852b39-364a-40e3-8ed8-a24178ee0403-config" (OuterVolumeSpecName: "config") pod "74852b39-364a-40e3-8ed8-a24178ee0403" (UID: "74852b39-364a-40e3-8ed8-a24178ee0403"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:43:22 crc kubenswrapper[4791]: I1208 21:43:22.821197 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/74852b39-364a-40e3-8ed8-a24178ee0403-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:22 crc kubenswrapper[4791]: I1208 21:43:22.821230 4791 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/74852b39-364a-40e3-8ed8-a24178ee0403-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:22 crc kubenswrapper[4791]: I1208 21:43:22.821241 4791 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/74852b39-364a-40e3-8ed8-a24178ee0403-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:22 crc kubenswrapper[4791]: I1208 21:43:22.841129 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 08 21:43:22 crc kubenswrapper[4791]: I1208 21:43:22.859780 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74852b39-364a-40e3-8ed8-a24178ee0403-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "74852b39-364a-40e3-8ed8-a24178ee0403" (UID: "74852b39-364a-40e3-8ed8-a24178ee0403"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:43:22 crc kubenswrapper[4791]: I1208 21:43:22.860482 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74852b39-364a-40e3-8ed8-a24178ee0403-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "74852b39-364a-40e3-8ed8-a24178ee0403" (UID: "74852b39-364a-40e3-8ed8-a24178ee0403"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:43:22 crc kubenswrapper[4791]: I1208 21:43:22.924074 4791 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/74852b39-364a-40e3-8ed8-a24178ee0403-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:22 crc kubenswrapper[4791]: I1208 21:43:22.924584 4791 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/74852b39-364a-40e3-8ed8-a24178ee0403-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:22 crc kubenswrapper[4791]: I1208 21:43:22.953587 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 08 21:43:22 crc kubenswrapper[4791]: I1208 21:43:22.970437 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 08 21:43:23 crc kubenswrapper[4791]: I1208 21:43:23.225422 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-7srv6" event={"ID":"d9f87323-3041-444f-b26d-c76871bd426f","Type":"ContainerStarted","Data":"c35ab6fa6c43f48deabd6df0a1d6ddb73543c365211d4a5c768078b80f38864f"} Dec 08 21:43:23 crc kubenswrapper[4791]: I1208 21:43:23.238422 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-76fcb88b6d-mffmn" event={"ID":"9458287a-1c73-47c0-8a35-a3b14ed39fab","Type":"ContainerStarted","Data":"b69ae391cb7823bf3905b0b60f769b1d709ea5fe60fcf69f19ab89366dab45dc"} Dec 08 21:43:23 crc kubenswrapper[4791]: I1208 21:43:23.238511 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-76fcb88b6d-mffmn" Dec 08 21:43:23 crc kubenswrapper[4791]: I1208 21:43:23.238545 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-76fcb88b6d-mffmn" Dec 08 21:43:23 crc kubenswrapper[4791]: I1208 21:43:23.247797 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-xlwz8" event={"ID":"74852b39-364a-40e3-8ed8-a24178ee0403","Type":"ContainerDied","Data":"a46ae09220334b3227ac60e278499e4f277585010c3577f7a49646f0f3a3d0d1"} Dec 08 21:43:23 crc kubenswrapper[4791]: I1208 21:43:23.247865 4791 scope.go:117] "RemoveContainer" containerID="c8ac760dcd8e06f76c056d88cb5383c05c088e0c91a71a268c06064ee5cc600b" Dec 08 21:43:23 crc kubenswrapper[4791]: I1208 21:43:23.248061 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-56df8fb6b7-xlwz8" Dec 08 21:43:23 crc kubenswrapper[4791]: I1208 21:43:23.253930 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-7srv6" podStartSLOduration=4.496838749 podStartE2EDuration="47.253910556s" podCreationTimestamp="2025-12-08 21:42:36 +0000 UTC" firstStartedPulling="2025-12-08 21:42:39.349894681 +0000 UTC m=+1436.048653026" lastFinishedPulling="2025-12-08 21:43:22.106966488 +0000 UTC m=+1478.805724833" observedRunningTime="2025-12-08 21:43:23.242644579 +0000 UTC m=+1479.941402934" watchObservedRunningTime="2025-12-08 21:43:23.253910556 +0000 UTC m=+1479.952668901" Dec 08 21:43:23 crc kubenswrapper[4791]: I1208 21:43:23.273150 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-76fcb88b6d-mffmn" podStartSLOduration=6.273128328 podStartE2EDuration="6.273128328s" podCreationTimestamp="2025-12-08 21:43:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:43:23.266477985 +0000 UTC m=+1479.965236330" watchObservedRunningTime="2025-12-08 21:43:23.273128328 +0000 UTC m=+1479.971886673" Dec 08 21:43:23 crc kubenswrapper[4791]: I1208 21:43:23.295399 4791 scope.go:117] "RemoveContainer" containerID="5574f52dc2ccd067a086cb88440dcf6edf8a3e09c924dabad0698cfd82165c6a" Dec 08 21:43:23 crc kubenswrapper[4791]: I1208 21:43:23.305804 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-xlwz8"] Dec 08 21:43:23 crc kubenswrapper[4791]: I1208 21:43:23.308953 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-xlwz8"] Dec 08 21:43:23 crc kubenswrapper[4791]: I1208 21:43:23.638201 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="74852b39-364a-40e3-8ed8-a24178ee0403" path="/var/lib/kubelet/pods/74852b39-364a-40e3-8ed8-a24178ee0403/volumes" Dec 08 21:43:24 crc kubenswrapper[4791]: I1208 21:43:24.261551 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-5vvss" event={"ID":"ee824c0f-2eaa-4eee-8dcf-f487d9445012","Type":"ContainerStarted","Data":"940d84f1c607e42e27d563201496fe5fee7d39f5ab579471f4695b8ded2bd000"} Dec 08 21:43:24 crc kubenswrapper[4791]: I1208 21:43:24.280901 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-5vvss" podStartSLOduration=4.296388312 podStartE2EDuration="48.280882394s" podCreationTimestamp="2025-12-08 21:42:36 +0000 UTC" firstStartedPulling="2025-12-08 21:42:38.336792495 +0000 UTC m=+1435.035550840" lastFinishedPulling="2025-12-08 21:43:22.321286577 +0000 UTC m=+1479.020044922" observedRunningTime="2025-12-08 21:43:24.278077206 +0000 UTC m=+1480.976835551" watchObservedRunningTime="2025-12-08 21:43:24.280882394 +0000 UTC m=+1480.979640739" Dec 08 21:43:27 crc kubenswrapper[4791]: I1208 21:43:27.292703 4791 generic.go:334] "Generic (PLEG): container finished" podID="d9f87323-3041-444f-b26d-c76871bd426f" containerID="c35ab6fa6c43f48deabd6df0a1d6ddb73543c365211d4a5c768078b80f38864f" exitCode=0 Dec 08 21:43:27 crc kubenswrapper[4791]: I1208 21:43:27.292748 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-7srv6" event={"ID":"d9f87323-3041-444f-b26d-c76871bd426f","Type":"ContainerDied","Data":"c35ab6fa6c43f48deabd6df0a1d6ddb73543c365211d4a5c768078b80f38864f"} Dec 08 21:43:28 crc kubenswrapper[4791]: I1208 
21:43:28.838807 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-7srv6" Dec 08 21:43:28 crc kubenswrapper[4791]: I1208 21:43:28.966266 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z7slx\" (UniqueName: \"kubernetes.io/projected/d9f87323-3041-444f-b26d-c76871bd426f-kube-api-access-z7slx\") pod \"d9f87323-3041-444f-b26d-c76871bd426f\" (UID: \"d9f87323-3041-444f-b26d-c76871bd426f\") " Dec 08 21:43:28 crc kubenswrapper[4791]: I1208 21:43:28.966413 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9f87323-3041-444f-b26d-c76871bd426f-combined-ca-bundle\") pod \"d9f87323-3041-444f-b26d-c76871bd426f\" (UID: \"d9f87323-3041-444f-b26d-c76871bd426f\") " Dec 08 21:43:28 crc kubenswrapper[4791]: I1208 21:43:28.966468 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d9f87323-3041-444f-b26d-c76871bd426f-db-sync-config-data\") pod \"d9f87323-3041-444f-b26d-c76871bd426f\" (UID: \"d9f87323-3041-444f-b26d-c76871bd426f\") " Dec 08 21:43:28 crc kubenswrapper[4791]: I1208 21:43:28.973391 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d9f87323-3041-444f-b26d-c76871bd426f-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "d9f87323-3041-444f-b26d-c76871bd426f" (UID: "d9f87323-3041-444f-b26d-c76871bd426f"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:28 crc kubenswrapper[4791]: I1208 21:43:28.974828 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9f87323-3041-444f-b26d-c76871bd426f-kube-api-access-z7slx" (OuterVolumeSpecName: "kube-api-access-z7slx") pod "d9f87323-3041-444f-b26d-c76871bd426f" (UID: "d9f87323-3041-444f-b26d-c76871bd426f"). InnerVolumeSpecName "kube-api-access-z7slx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.000915 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d9f87323-3041-444f-b26d-c76871bd426f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d9f87323-3041-444f-b26d-c76871bd426f" (UID: "d9f87323-3041-444f-b26d-c76871bd426f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.069638 4791 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d9f87323-3041-444f-b26d-c76871bd426f-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.069685 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z7slx\" (UniqueName: \"kubernetes.io/projected/d9f87323-3041-444f-b26d-c76871bd426f-kube-api-access-z7slx\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.069699 4791 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9f87323-3041-444f-b26d-c76871bd426f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.315578 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-7srv6" event={"ID":"d9f87323-3041-444f-b26d-c76871bd426f","Type":"ContainerDied","Data":"ecc83e26f6e43fbdf35da3007c13777e6c70822e74909d6f70d2099499b42196"} Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.315626 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ecc83e26f6e43fbdf35da3007c13777e6c70822e74909d6f70d2099499b42196" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.315686 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-7srv6" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.317876 4791 generic.go:334] "Generic (PLEG): container finished" podID="ee824c0f-2eaa-4eee-8dcf-f487d9445012" containerID="940d84f1c607e42e27d563201496fe5fee7d39f5ab579471f4695b8ded2bd000" exitCode=0 Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.317982 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-5vvss" event={"ID":"ee824c0f-2eaa-4eee-8dcf-f487d9445012","Type":"ContainerDied","Data":"940d84f1c607e42e27d563201496fe5fee7d39f5ab579471f4695b8ded2bd000"} Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.564998 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-655599f54b-bjpp6"] Dec 08 21:43:29 crc kubenswrapper[4791]: E1208 21:43:29.565815 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9f87323-3041-444f-b26d-c76871bd426f" containerName="barbican-db-sync" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.565827 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9f87323-3041-444f-b26d-c76871bd426f" containerName="barbican-db-sync" Dec 08 21:43:29 crc kubenswrapper[4791]: E1208 21:43:29.565855 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e" containerName="heat-db-sync" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.565861 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e" containerName="heat-db-sync" Dec 08 21:43:29 crc kubenswrapper[4791]: E1208 21:43:29.565883 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74852b39-364a-40e3-8ed8-a24178ee0403" containerName="dnsmasq-dns" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.565891 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="74852b39-364a-40e3-8ed8-a24178ee0403" containerName="dnsmasq-dns" Dec 08 21:43:29 crc kubenswrapper[4791]: E1208 
21:43:29.565907 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74852b39-364a-40e3-8ed8-a24178ee0403" containerName="init" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.565914 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="74852b39-364a-40e3-8ed8-a24178ee0403" containerName="init" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.566136 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e" containerName="heat-db-sync" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.566157 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="74852b39-364a-40e3-8ed8-a24178ee0403" containerName="dnsmasq-dns" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.566168 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9f87323-3041-444f-b26d-c76871bd426f" containerName="barbican-db-sync" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.567444 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-655599f54b-bjpp6" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.571474 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.571767 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-78vjm" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.571944 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.584914 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-765b6fcd95-7rp4s"] Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.586883 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-765b6fcd95-7rp4s" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.589860 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.621725 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-655599f54b-bjpp6"] Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.632301 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-765b6fcd95-7rp4s"] Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.679820 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-jm5mk"] Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.686354 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85e818fb-c004-4699-86d4-d06e8216ddd3-config-data\") pod \"barbican-keystone-listener-655599f54b-bjpp6\" (UID: \"85e818fb-c004-4699-86d4-d06e8216ddd3\") " pod="openstack/barbican-keystone-listener-655599f54b-bjpp6" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.686402 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-484pk\" (UniqueName: \"kubernetes.io/projected/85e818fb-c004-4699-86d4-d06e8216ddd3-kube-api-access-484pk\") pod \"barbican-keystone-listener-655599f54b-bjpp6\" (UID: \"85e818fb-c004-4699-86d4-d06e8216ddd3\") " pod="openstack/barbican-keystone-listener-655599f54b-bjpp6" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.686963 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85e818fb-c004-4699-86d4-d06e8216ddd3-logs\") pod \"barbican-keystone-listener-655599f54b-bjpp6\" (UID: \"85e818fb-c004-4699-86d4-d06e8216ddd3\") " pod="openstack/barbican-keystone-listener-655599f54b-bjpp6" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.687077 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-848cf88cfc-jm5mk" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.687669 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85e818fb-c004-4699-86d4-d06e8216ddd3-combined-ca-bundle\") pod \"barbican-keystone-listener-655599f54b-bjpp6\" (UID: \"85e818fb-c004-4699-86d4-d06e8216ddd3\") " pod="openstack/barbican-keystone-listener-655599f54b-bjpp6" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.687751 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/85e818fb-c004-4699-86d4-d06e8216ddd3-config-data-custom\") pod \"barbican-keystone-listener-655599f54b-bjpp6\" (UID: \"85e818fb-c004-4699-86d4-d06e8216ddd3\") " pod="openstack/barbican-keystone-listener-655599f54b-bjpp6" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.694299 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-jm5mk"] Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.789173 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85e818fb-c004-4699-86d4-d06e8216ddd3-config-data\") pod \"barbican-keystone-listener-655599f54b-bjpp6\" (UID: \"85e818fb-c004-4699-86d4-d06e8216ddd3\") " pod="openstack/barbican-keystone-listener-655599f54b-bjpp6" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.789224 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7544de62-e997-46e9-aa5b-584bfce301b1-dns-swift-storage-0\") pod \"dnsmasq-dns-848cf88cfc-jm5mk\" (UID: \"7544de62-e997-46e9-aa5b-584bfce301b1\") " pod="openstack/dnsmasq-dns-848cf88cfc-jm5mk" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.789246 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-484pk\" (UniqueName: \"kubernetes.io/projected/85e818fb-c004-4699-86d4-d06e8216ddd3-kube-api-access-484pk\") pod \"barbican-keystone-listener-655599f54b-bjpp6\" (UID: \"85e818fb-c004-4699-86d4-d06e8216ddd3\") " pod="openstack/barbican-keystone-listener-655599f54b-bjpp6" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.789274 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae2c49a8-2981-4fa3-a18a-afa91303d23f-combined-ca-bundle\") pod \"barbican-worker-765b6fcd95-7rp4s\" (UID: \"ae2c49a8-2981-4fa3-a18a-afa91303d23f\") " pod="openstack/barbican-worker-765b6fcd95-7rp4s" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.789321 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85e818fb-c004-4699-86d4-d06e8216ddd3-logs\") pod \"barbican-keystone-listener-655599f54b-bjpp6\" (UID: \"85e818fb-c004-4699-86d4-d06e8216ddd3\") " pod="openstack/barbican-keystone-listener-655599f54b-bjpp6" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.789337 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7x9c6\" (UniqueName: \"kubernetes.io/projected/ae2c49a8-2981-4fa3-a18a-afa91303d23f-kube-api-access-7x9c6\") pod \"barbican-worker-765b6fcd95-7rp4s\" (UID: 
\"ae2c49a8-2981-4fa3-a18a-afa91303d23f\") " pod="openstack/barbican-worker-765b6fcd95-7rp4s" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.789364 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7544de62-e997-46e9-aa5b-584bfce301b1-dns-svc\") pod \"dnsmasq-dns-848cf88cfc-jm5mk\" (UID: \"7544de62-e997-46e9-aa5b-584bfce301b1\") " pod="openstack/dnsmasq-dns-848cf88cfc-jm5mk" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.789383 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wzfk7\" (UniqueName: \"kubernetes.io/projected/7544de62-e997-46e9-aa5b-584bfce301b1-kube-api-access-wzfk7\") pod \"dnsmasq-dns-848cf88cfc-jm5mk\" (UID: \"7544de62-e997-46e9-aa5b-584bfce301b1\") " pod="openstack/dnsmasq-dns-848cf88cfc-jm5mk" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.789423 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85e818fb-c004-4699-86d4-d06e8216ddd3-combined-ca-bundle\") pod \"barbican-keystone-listener-655599f54b-bjpp6\" (UID: \"85e818fb-c004-4699-86d4-d06e8216ddd3\") " pod="openstack/barbican-keystone-listener-655599f54b-bjpp6" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.789465 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/85e818fb-c004-4699-86d4-d06e8216ddd3-config-data-custom\") pod \"barbican-keystone-listener-655599f54b-bjpp6\" (UID: \"85e818fb-c004-4699-86d4-d06e8216ddd3\") " pod="openstack/barbican-keystone-listener-655599f54b-bjpp6" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.789481 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae2c49a8-2981-4fa3-a18a-afa91303d23f-config-data\") pod \"barbican-worker-765b6fcd95-7rp4s\" (UID: \"ae2c49a8-2981-4fa3-a18a-afa91303d23f\") " pod="openstack/barbican-worker-765b6fcd95-7rp4s" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.789514 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7544de62-e997-46e9-aa5b-584bfce301b1-config\") pod \"dnsmasq-dns-848cf88cfc-jm5mk\" (UID: \"7544de62-e997-46e9-aa5b-584bfce301b1\") " pod="openstack/dnsmasq-dns-848cf88cfc-jm5mk" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.789554 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae2c49a8-2981-4fa3-a18a-afa91303d23f-logs\") pod \"barbican-worker-765b6fcd95-7rp4s\" (UID: \"ae2c49a8-2981-4fa3-a18a-afa91303d23f\") " pod="openstack/barbican-worker-765b6fcd95-7rp4s" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.789586 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ae2c49a8-2981-4fa3-a18a-afa91303d23f-config-data-custom\") pod \"barbican-worker-765b6fcd95-7rp4s\" (UID: \"ae2c49a8-2981-4fa3-a18a-afa91303d23f\") " pod="openstack/barbican-worker-765b6fcd95-7rp4s" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.789614 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" 
(UniqueName: \"kubernetes.io/configmap/7544de62-e997-46e9-aa5b-584bfce301b1-ovsdbserver-nb\") pod \"dnsmasq-dns-848cf88cfc-jm5mk\" (UID: \"7544de62-e997-46e9-aa5b-584bfce301b1\") " pod="openstack/dnsmasq-dns-848cf88cfc-jm5mk" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.789655 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7544de62-e997-46e9-aa5b-584bfce301b1-ovsdbserver-sb\") pod \"dnsmasq-dns-848cf88cfc-jm5mk\" (UID: \"7544de62-e997-46e9-aa5b-584bfce301b1\") " pod="openstack/dnsmasq-dns-848cf88cfc-jm5mk" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.794636 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85e818fb-c004-4699-86d4-d06e8216ddd3-logs\") pod \"barbican-keystone-listener-655599f54b-bjpp6\" (UID: \"85e818fb-c004-4699-86d4-d06e8216ddd3\") " pod="openstack/barbican-keystone-listener-655599f54b-bjpp6" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.801796 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85e818fb-c004-4699-86d4-d06e8216ddd3-combined-ca-bundle\") pod \"barbican-keystone-listener-655599f54b-bjpp6\" (UID: \"85e818fb-c004-4699-86d4-d06e8216ddd3\") " pod="openstack/barbican-keystone-listener-655599f54b-bjpp6" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.806936 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85e818fb-c004-4699-86d4-d06e8216ddd3-config-data\") pod \"barbican-keystone-listener-655599f54b-bjpp6\" (UID: \"85e818fb-c004-4699-86d4-d06e8216ddd3\") " pod="openstack/barbican-keystone-listener-655599f54b-bjpp6" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.816825 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-5d467568c4-lnvsw"] Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.818579 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5d467568c4-lnvsw" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.827160 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.828343 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/85e818fb-c004-4699-86d4-d06e8216ddd3-config-data-custom\") pod \"barbican-keystone-listener-655599f54b-bjpp6\" (UID: \"85e818fb-c004-4699-86d4-d06e8216ddd3\") " pod="openstack/barbican-keystone-listener-655599f54b-bjpp6" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.848795 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-484pk\" (UniqueName: \"kubernetes.io/projected/85e818fb-c004-4699-86d4-d06e8216ddd3-kube-api-access-484pk\") pod \"barbican-keystone-listener-655599f54b-bjpp6\" (UID: \"85e818fb-c004-4699-86d4-d06e8216ddd3\") " pod="openstack/barbican-keystone-listener-655599f54b-bjpp6" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.863239 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5d467568c4-lnvsw"] Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.895428 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-655599f54b-bjpp6" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.895661 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae2c49a8-2981-4fa3-a18a-afa91303d23f-config-data\") pod \"barbican-worker-765b6fcd95-7rp4s\" (UID: \"ae2c49a8-2981-4fa3-a18a-afa91303d23f\") " pod="openstack/barbican-worker-765b6fcd95-7rp4s" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.895701 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7544de62-e997-46e9-aa5b-584bfce301b1-config\") pod \"dnsmasq-dns-848cf88cfc-jm5mk\" (UID: \"7544de62-e997-46e9-aa5b-584bfce301b1\") " pod="openstack/dnsmasq-dns-848cf88cfc-jm5mk" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.895750 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e3c537a-85a8-4d90-a23d-8137a6265307-combined-ca-bundle\") pod \"barbican-api-5d467568c4-lnvsw\" (UID: \"5e3c537a-85a8-4d90-a23d-8137a6265307\") " pod="openstack/barbican-api-5d467568c4-lnvsw" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.895772 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae2c49a8-2981-4fa3-a18a-afa91303d23f-logs\") pod \"barbican-worker-765b6fcd95-7rp4s\" (UID: \"ae2c49a8-2981-4fa3-a18a-afa91303d23f\") " pod="openstack/barbican-worker-765b6fcd95-7rp4s" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.895802 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ae2c49a8-2981-4fa3-a18a-afa91303d23f-config-data-custom\") pod \"barbican-worker-765b6fcd95-7rp4s\" (UID: \"ae2c49a8-2981-4fa3-a18a-afa91303d23f\") " pod="openstack/barbican-worker-765b6fcd95-7rp4s" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.895820 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7544de62-e997-46e9-aa5b-584bfce301b1-ovsdbserver-nb\") pod \"dnsmasq-dns-848cf88cfc-jm5mk\" (UID: \"7544de62-e997-46e9-aa5b-584bfce301b1\") " pod="openstack/dnsmasq-dns-848cf88cfc-jm5mk" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.895846 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7544de62-e997-46e9-aa5b-584bfce301b1-ovsdbserver-sb\") pod \"dnsmasq-dns-848cf88cfc-jm5mk\" (UID: \"7544de62-e997-46e9-aa5b-584bfce301b1\") " pod="openstack/dnsmasq-dns-848cf88cfc-jm5mk" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.895878 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5e3c537a-85a8-4d90-a23d-8137a6265307-logs\") pod \"barbican-api-5d467568c4-lnvsw\" (UID: \"5e3c537a-85a8-4d90-a23d-8137a6265307\") " pod="openstack/barbican-api-5d467568c4-lnvsw" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.895912 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7544de62-e997-46e9-aa5b-584bfce301b1-dns-swift-storage-0\") pod \"dnsmasq-dns-848cf88cfc-jm5mk\" (UID: 
\"7544de62-e997-46e9-aa5b-584bfce301b1\") " pod="openstack/dnsmasq-dns-848cf88cfc-jm5mk" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.895936 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae2c49a8-2981-4fa3-a18a-afa91303d23f-combined-ca-bundle\") pod \"barbican-worker-765b6fcd95-7rp4s\" (UID: \"ae2c49a8-2981-4fa3-a18a-afa91303d23f\") " pod="openstack/barbican-worker-765b6fcd95-7rp4s" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.895963 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e3c537a-85a8-4d90-a23d-8137a6265307-config-data\") pod \"barbican-api-5d467568c4-lnvsw\" (UID: \"5e3c537a-85a8-4d90-a23d-8137a6265307\") " pod="openstack/barbican-api-5d467568c4-lnvsw" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.895999 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7x9c6\" (UniqueName: \"kubernetes.io/projected/ae2c49a8-2981-4fa3-a18a-afa91303d23f-kube-api-access-7x9c6\") pod \"barbican-worker-765b6fcd95-7rp4s\" (UID: \"ae2c49a8-2981-4fa3-a18a-afa91303d23f\") " pod="openstack/barbican-worker-765b6fcd95-7rp4s" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.896020 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7544de62-e997-46e9-aa5b-584bfce301b1-dns-svc\") pod \"dnsmasq-dns-848cf88cfc-jm5mk\" (UID: \"7544de62-e997-46e9-aa5b-584bfce301b1\") " pod="openstack/dnsmasq-dns-848cf88cfc-jm5mk" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.896043 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wzfk7\" (UniqueName: \"kubernetes.io/projected/7544de62-e997-46e9-aa5b-584bfce301b1-kube-api-access-wzfk7\") pod \"dnsmasq-dns-848cf88cfc-jm5mk\" (UID: \"7544de62-e997-46e9-aa5b-584bfce301b1\") " pod="openstack/dnsmasq-dns-848cf88cfc-jm5mk" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.896093 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5e3c537a-85a8-4d90-a23d-8137a6265307-config-data-custom\") pod \"barbican-api-5d467568c4-lnvsw\" (UID: \"5e3c537a-85a8-4d90-a23d-8137a6265307\") " pod="openstack/barbican-api-5d467568c4-lnvsw" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.896117 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x28hw\" (UniqueName: \"kubernetes.io/projected/5e3c537a-85a8-4d90-a23d-8137a6265307-kube-api-access-x28hw\") pod \"barbican-api-5d467568c4-lnvsw\" (UID: \"5e3c537a-85a8-4d90-a23d-8137a6265307\") " pod="openstack/barbican-api-5d467568c4-lnvsw" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.896879 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7544de62-e997-46e9-aa5b-584bfce301b1-ovsdbserver-nb\") pod \"dnsmasq-dns-848cf88cfc-jm5mk\" (UID: \"7544de62-e997-46e9-aa5b-584bfce301b1\") " pod="openstack/dnsmasq-dns-848cf88cfc-jm5mk" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.897382 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7544de62-e997-46e9-aa5b-584bfce301b1-ovsdbserver-sb\") pod 
\"dnsmasq-dns-848cf88cfc-jm5mk\" (UID: \"7544de62-e997-46e9-aa5b-584bfce301b1\") " pod="openstack/dnsmasq-dns-848cf88cfc-jm5mk" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.898673 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7544de62-e997-46e9-aa5b-584bfce301b1-dns-swift-storage-0\") pod \"dnsmasq-dns-848cf88cfc-jm5mk\" (UID: \"7544de62-e997-46e9-aa5b-584bfce301b1\") " pod="openstack/dnsmasq-dns-848cf88cfc-jm5mk" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.899574 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7544de62-e997-46e9-aa5b-584bfce301b1-config\") pod \"dnsmasq-dns-848cf88cfc-jm5mk\" (UID: \"7544de62-e997-46e9-aa5b-584bfce301b1\") " pod="openstack/dnsmasq-dns-848cf88cfc-jm5mk" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.899754 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7544de62-e997-46e9-aa5b-584bfce301b1-dns-svc\") pod \"dnsmasq-dns-848cf88cfc-jm5mk\" (UID: \"7544de62-e997-46e9-aa5b-584bfce301b1\") " pod="openstack/dnsmasq-dns-848cf88cfc-jm5mk" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.900050 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae2c49a8-2981-4fa3-a18a-afa91303d23f-logs\") pod \"barbican-worker-765b6fcd95-7rp4s\" (UID: \"ae2c49a8-2981-4fa3-a18a-afa91303d23f\") " pod="openstack/barbican-worker-765b6fcd95-7rp4s" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.910512 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ae2c49a8-2981-4fa3-a18a-afa91303d23f-config-data-custom\") pod \"barbican-worker-765b6fcd95-7rp4s\" (UID: \"ae2c49a8-2981-4fa3-a18a-afa91303d23f\") " pod="openstack/barbican-worker-765b6fcd95-7rp4s" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.917528 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae2c49a8-2981-4fa3-a18a-afa91303d23f-combined-ca-bundle\") pod \"barbican-worker-765b6fcd95-7rp4s\" (UID: \"ae2c49a8-2981-4fa3-a18a-afa91303d23f\") " pod="openstack/barbican-worker-765b6fcd95-7rp4s" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.928122 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae2c49a8-2981-4fa3-a18a-afa91303d23f-config-data\") pod \"barbican-worker-765b6fcd95-7rp4s\" (UID: \"ae2c49a8-2981-4fa3-a18a-afa91303d23f\") " pod="openstack/barbican-worker-765b6fcd95-7rp4s" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.950402 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7x9c6\" (UniqueName: \"kubernetes.io/projected/ae2c49a8-2981-4fa3-a18a-afa91303d23f-kube-api-access-7x9c6\") pod \"barbican-worker-765b6fcd95-7rp4s\" (UID: \"ae2c49a8-2981-4fa3-a18a-afa91303d23f\") " pod="openstack/barbican-worker-765b6fcd95-7rp4s" Dec 08 21:43:29 crc kubenswrapper[4791]: I1208 21:43:29.967567 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wzfk7\" (UniqueName: \"kubernetes.io/projected/7544de62-e997-46e9-aa5b-584bfce301b1-kube-api-access-wzfk7\") pod \"dnsmasq-dns-848cf88cfc-jm5mk\" (UID: \"7544de62-e997-46e9-aa5b-584bfce301b1\") " 
pod="openstack/dnsmasq-dns-848cf88cfc-jm5mk" Dec 08 21:43:30 crc kubenswrapper[4791]: I1208 21:43:30.007057 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5e3c537a-85a8-4d90-a23d-8137a6265307-logs\") pod \"barbican-api-5d467568c4-lnvsw\" (UID: \"5e3c537a-85a8-4d90-a23d-8137a6265307\") " pod="openstack/barbican-api-5d467568c4-lnvsw" Dec 08 21:43:30 crc kubenswrapper[4791]: I1208 21:43:30.007144 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e3c537a-85a8-4d90-a23d-8137a6265307-config-data\") pod \"barbican-api-5d467568c4-lnvsw\" (UID: \"5e3c537a-85a8-4d90-a23d-8137a6265307\") " pod="openstack/barbican-api-5d467568c4-lnvsw" Dec 08 21:43:30 crc kubenswrapper[4791]: I1208 21:43:30.007229 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5e3c537a-85a8-4d90-a23d-8137a6265307-config-data-custom\") pod \"barbican-api-5d467568c4-lnvsw\" (UID: \"5e3c537a-85a8-4d90-a23d-8137a6265307\") " pod="openstack/barbican-api-5d467568c4-lnvsw" Dec 08 21:43:30 crc kubenswrapper[4791]: I1208 21:43:30.007254 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x28hw\" (UniqueName: \"kubernetes.io/projected/5e3c537a-85a8-4d90-a23d-8137a6265307-kube-api-access-x28hw\") pod \"barbican-api-5d467568c4-lnvsw\" (UID: \"5e3c537a-85a8-4d90-a23d-8137a6265307\") " pod="openstack/barbican-api-5d467568c4-lnvsw" Dec 08 21:43:30 crc kubenswrapper[4791]: I1208 21:43:30.007317 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e3c537a-85a8-4d90-a23d-8137a6265307-combined-ca-bundle\") pod \"barbican-api-5d467568c4-lnvsw\" (UID: \"5e3c537a-85a8-4d90-a23d-8137a6265307\") " pod="openstack/barbican-api-5d467568c4-lnvsw" Dec 08 21:43:30 crc kubenswrapper[4791]: I1208 21:43:30.011351 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e3c537a-85a8-4d90-a23d-8137a6265307-combined-ca-bundle\") pod \"barbican-api-5d467568c4-lnvsw\" (UID: \"5e3c537a-85a8-4d90-a23d-8137a6265307\") " pod="openstack/barbican-api-5d467568c4-lnvsw" Dec 08 21:43:30 crc kubenswrapper[4791]: I1208 21:43:30.014940 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5e3c537a-85a8-4d90-a23d-8137a6265307-config-data-custom\") pod \"barbican-api-5d467568c4-lnvsw\" (UID: \"5e3c537a-85a8-4d90-a23d-8137a6265307\") " pod="openstack/barbican-api-5d467568c4-lnvsw" Dec 08 21:43:30 crc kubenswrapper[4791]: I1208 21:43:30.015167 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5e3c537a-85a8-4d90-a23d-8137a6265307-logs\") pod \"barbican-api-5d467568c4-lnvsw\" (UID: \"5e3c537a-85a8-4d90-a23d-8137a6265307\") " pod="openstack/barbican-api-5d467568c4-lnvsw" Dec 08 21:43:30 crc kubenswrapper[4791]: I1208 21:43:30.023887 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e3c537a-85a8-4d90-a23d-8137a6265307-config-data\") pod \"barbican-api-5d467568c4-lnvsw\" (UID: \"5e3c537a-85a8-4d90-a23d-8137a6265307\") " pod="openstack/barbican-api-5d467568c4-lnvsw" Dec 08 21:43:30 crc kubenswrapper[4791]: I1208 
21:43:30.036303 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-848cf88cfc-jm5mk" Dec 08 21:43:30 crc kubenswrapper[4791]: I1208 21:43:30.048808 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x28hw\" (UniqueName: \"kubernetes.io/projected/5e3c537a-85a8-4d90-a23d-8137a6265307-kube-api-access-x28hw\") pod \"barbican-api-5d467568c4-lnvsw\" (UID: \"5e3c537a-85a8-4d90-a23d-8137a6265307\") " pod="openstack/barbican-api-5d467568c4-lnvsw" Dec 08 21:43:30 crc kubenswrapper[4791]: I1208 21:43:30.083194 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5d467568c4-lnvsw" Dec 08 21:43:30 crc kubenswrapper[4791]: I1208 21:43:30.220042 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-765b6fcd95-7rp4s" Dec 08 21:43:30 crc kubenswrapper[4791]: I1208 21:43:30.657899 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-655599f54b-bjpp6"] Dec 08 21:43:30 crc kubenswrapper[4791]: I1208 21:43:30.789168 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-jm5mk"] Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.034049 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5d467568c4-lnvsw"] Dec 08 21:43:31 crc kubenswrapper[4791]: W1208 21:43:31.047930 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5e3c537a_85a8_4d90_a23d_8137a6265307.slice/crio-c1dd823abdaf4375c04976f2bd5fe242a0c76bcdd291561858ac0bfc141400da WatchSource:0}: Error finding container c1dd823abdaf4375c04976f2bd5fe242a0c76bcdd291561858ac0bfc141400da: Status 404 returned error can't find the container with id c1dd823abdaf4375c04976f2bd5fe242a0c76bcdd291561858ac0bfc141400da Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.189918 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-765b6fcd95-7rp4s"] Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.209334 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-5vvss" Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.347979 4791 generic.go:334] "Generic (PLEG): container finished" podID="7544de62-e997-46e9-aa5b-584bfce301b1" containerID="5489bf143597f1f27f59a1aa41b6d8baf281a22544ee4054d50d13ac20c41b78" exitCode=0 Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.348131 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-848cf88cfc-jm5mk" event={"ID":"7544de62-e997-46e9-aa5b-584bfce301b1","Type":"ContainerDied","Data":"5489bf143597f1f27f59a1aa41b6d8baf281a22544ee4054d50d13ac20c41b78"} Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.348170 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-848cf88cfc-jm5mk" event={"ID":"7544de62-e997-46e9-aa5b-584bfce301b1","Type":"ContainerStarted","Data":"25c697700da53107c29866e05b86045f72949bb7df09030b25bb2f7421a5ecfb"} Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.352941 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5d467568c4-lnvsw" event={"ID":"5e3c537a-85a8-4d90-a23d-8137a6265307","Type":"ContainerStarted","Data":"a75adf3dad44bff2bce5138cb475aa2ec92bd233a5607798e43a2850af056053"} Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.352992 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5d467568c4-lnvsw" event={"ID":"5e3c537a-85a8-4d90-a23d-8137a6265307","Type":"ContainerStarted","Data":"c1dd823abdaf4375c04976f2bd5fe242a0c76bcdd291561858ac0bfc141400da"} Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.353081 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee824c0f-2eaa-4eee-8dcf-f487d9445012-combined-ca-bundle\") pod \"ee824c0f-2eaa-4eee-8dcf-f487d9445012\" (UID: \"ee824c0f-2eaa-4eee-8dcf-f487d9445012\") " Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.353146 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ee824c0f-2eaa-4eee-8dcf-f487d9445012-db-sync-config-data\") pod \"ee824c0f-2eaa-4eee-8dcf-f487d9445012\" (UID: \"ee824c0f-2eaa-4eee-8dcf-f487d9445012\") " Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.353221 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee824c0f-2eaa-4eee-8dcf-f487d9445012-config-data\") pod \"ee824c0f-2eaa-4eee-8dcf-f487d9445012\" (UID: \"ee824c0f-2eaa-4eee-8dcf-f487d9445012\") " Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.353247 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xbpxg\" (UniqueName: \"kubernetes.io/projected/ee824c0f-2eaa-4eee-8dcf-f487d9445012-kube-api-access-xbpxg\") pod \"ee824c0f-2eaa-4eee-8dcf-f487d9445012\" (UID: \"ee824c0f-2eaa-4eee-8dcf-f487d9445012\") " Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.353370 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee824c0f-2eaa-4eee-8dcf-f487d9445012-scripts\") pod \"ee824c0f-2eaa-4eee-8dcf-f487d9445012\" (UID: \"ee824c0f-2eaa-4eee-8dcf-f487d9445012\") " Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.353425 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/ee824c0f-2eaa-4eee-8dcf-f487d9445012-etc-machine-id\") pod \"ee824c0f-2eaa-4eee-8dcf-f487d9445012\" (UID: \"ee824c0f-2eaa-4eee-8dcf-f487d9445012\") " Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.353926 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ee824c0f-2eaa-4eee-8dcf-f487d9445012-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "ee824c0f-2eaa-4eee-8dcf-f487d9445012" (UID: "ee824c0f-2eaa-4eee-8dcf-f487d9445012"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.358540 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee824c0f-2eaa-4eee-8dcf-f487d9445012-scripts" (OuterVolumeSpecName: "scripts") pod "ee824c0f-2eaa-4eee-8dcf-f487d9445012" (UID: "ee824c0f-2eaa-4eee-8dcf-f487d9445012"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.359661 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee824c0f-2eaa-4eee-8dcf-f487d9445012-kube-api-access-xbpxg" (OuterVolumeSpecName: "kube-api-access-xbpxg") pod "ee824c0f-2eaa-4eee-8dcf-f487d9445012" (UID: "ee824c0f-2eaa-4eee-8dcf-f487d9445012"). InnerVolumeSpecName "kube-api-access-xbpxg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.362530 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-5vvss" Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.362791 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-5vvss" event={"ID":"ee824c0f-2eaa-4eee-8dcf-f487d9445012","Type":"ContainerDied","Data":"f9475771d78b43284416cad40c760485d5ca6cf6ca45e18bcf03ccf08450acab"} Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.362851 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f9475771d78b43284416cad40c760485d5ca6cf6ca45e18bcf03ccf08450acab" Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.363218 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee824c0f-2eaa-4eee-8dcf-f487d9445012-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "ee824c0f-2eaa-4eee-8dcf-f487d9445012" (UID: "ee824c0f-2eaa-4eee-8dcf-f487d9445012"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.364580 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-765b6fcd95-7rp4s" event={"ID":"ae2c49a8-2981-4fa3-a18a-afa91303d23f","Type":"ContainerStarted","Data":"f157cfbed7941dc74659b0516662d8f439fe21aad031a98689c29b600987d820"} Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.365816 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-655599f54b-bjpp6" event={"ID":"85e818fb-c004-4699-86d4-d06e8216ddd3","Type":"ContainerStarted","Data":"056584f6e3008127b43efaa309d8ada43cd17d0068d941d1710c55e30c8a7b6b"} Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.406904 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee824c0f-2eaa-4eee-8dcf-f487d9445012-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ee824c0f-2eaa-4eee-8dcf-f487d9445012" (UID: "ee824c0f-2eaa-4eee-8dcf-f487d9445012"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.448324 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee824c0f-2eaa-4eee-8dcf-f487d9445012-config-data" (OuterVolumeSpecName: "config-data") pod "ee824c0f-2eaa-4eee-8dcf-f487d9445012" (UID: "ee824c0f-2eaa-4eee-8dcf-f487d9445012"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.457391 4791 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee824c0f-2eaa-4eee-8dcf-f487d9445012-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.457427 4791 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ee824c0f-2eaa-4eee-8dcf-f487d9445012-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.457437 4791 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee824c0f-2eaa-4eee-8dcf-f487d9445012-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.457446 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xbpxg\" (UniqueName: \"kubernetes.io/projected/ee824c0f-2eaa-4eee-8dcf-f487d9445012-kube-api-access-xbpxg\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.457456 4791 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee824c0f-2eaa-4eee-8dcf-f487d9445012-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.457467 4791 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ee824c0f-2eaa-4eee-8dcf-f487d9445012-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.631911 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 08 21:43:31 crc kubenswrapper[4791]: E1208 21:43:31.632322 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee824c0f-2eaa-4eee-8dcf-f487d9445012" containerName="cinder-db-sync" Dec 08 
21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.632340 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee824c0f-2eaa-4eee-8dcf-f487d9445012" containerName="cinder-db-sync" Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.632582 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee824c0f-2eaa-4eee-8dcf-f487d9445012" containerName="cinder-db-sync" Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.633824 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.639278 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.642323 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.759535 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-jm5mk"] Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.765445 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/71af1c1b-fb31-4f2a-8bca-bc7265ff57ea-scripts\") pod \"cinder-scheduler-0\" (UID: \"71af1c1b-fb31-4f2a-8bca-bc7265ff57ea\") " pod="openstack/cinder-scheduler-0" Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.765536 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/71af1c1b-fb31-4f2a-8bca-bc7265ff57ea-config-data\") pod \"cinder-scheduler-0\" (UID: \"71af1c1b-fb31-4f2a-8bca-bc7265ff57ea\") " pod="openstack/cinder-scheduler-0" Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.765850 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/71af1c1b-fb31-4f2a-8bca-bc7265ff57ea-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"71af1c1b-fb31-4f2a-8bca-bc7265ff57ea\") " pod="openstack/cinder-scheduler-0" Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.766031 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/71af1c1b-fb31-4f2a-8bca-bc7265ff57ea-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"71af1c1b-fb31-4f2a-8bca-bc7265ff57ea\") " pod="openstack/cinder-scheduler-0" Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.766199 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71af1c1b-fb31-4f2a-8bca-bc7265ff57ea-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"71af1c1b-fb31-4f2a-8bca-bc7265ff57ea\") " pod="openstack/cinder-scheduler-0" Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.770239 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wqmsj\" (UniqueName: \"kubernetes.io/projected/71af1c1b-fb31-4f2a-8bca-bc7265ff57ea-kube-api-access-wqmsj\") pod \"cinder-scheduler-0\" (UID: \"71af1c1b-fb31-4f2a-8bca-bc7265ff57ea\") " pod="openstack/cinder-scheduler-0" Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.809105 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-fgpqc"] Dec 08 21:43:31 
crc kubenswrapper[4791]: I1208 21:43:31.811719 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-fgpqc" Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.872787 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wqmsj\" (UniqueName: \"kubernetes.io/projected/71af1c1b-fb31-4f2a-8bca-bc7265ff57ea-kube-api-access-wqmsj\") pod \"cinder-scheduler-0\" (UID: \"71af1c1b-fb31-4f2a-8bca-bc7265ff57ea\") " pod="openstack/cinder-scheduler-0" Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.873162 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/71af1c1b-fb31-4f2a-8bca-bc7265ff57ea-scripts\") pod \"cinder-scheduler-0\" (UID: \"71af1c1b-fb31-4f2a-8bca-bc7265ff57ea\") " pod="openstack/cinder-scheduler-0" Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.873196 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/71af1c1b-fb31-4f2a-8bca-bc7265ff57ea-config-data\") pod \"cinder-scheduler-0\" (UID: \"71af1c1b-fb31-4f2a-8bca-bc7265ff57ea\") " pod="openstack/cinder-scheduler-0" Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.873313 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/71af1c1b-fb31-4f2a-8bca-bc7265ff57ea-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"71af1c1b-fb31-4f2a-8bca-bc7265ff57ea\") " pod="openstack/cinder-scheduler-0" Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.873382 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/71af1c1b-fb31-4f2a-8bca-bc7265ff57ea-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"71af1c1b-fb31-4f2a-8bca-bc7265ff57ea\") " pod="openstack/cinder-scheduler-0" Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.873423 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71af1c1b-fb31-4f2a-8bca-bc7265ff57ea-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"71af1c1b-fb31-4f2a-8bca-bc7265ff57ea\") " pod="openstack/cinder-scheduler-0" Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.887073 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/71af1c1b-fb31-4f2a-8bca-bc7265ff57ea-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"71af1c1b-fb31-4f2a-8bca-bc7265ff57ea\") " pod="openstack/cinder-scheduler-0" Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.892093 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/71af1c1b-fb31-4f2a-8bca-bc7265ff57ea-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"71af1c1b-fb31-4f2a-8bca-bc7265ff57ea\") " pod="openstack/cinder-scheduler-0" Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.894550 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/71af1c1b-fb31-4f2a-8bca-bc7265ff57ea-config-data\") pod \"cinder-scheduler-0\" (UID: \"71af1c1b-fb31-4f2a-8bca-bc7265ff57ea\") " pod="openstack/cinder-scheduler-0" Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.896370 4791 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/71af1c1b-fb31-4f2a-8bca-bc7265ff57ea-scripts\") pod \"cinder-scheduler-0\" (UID: \"71af1c1b-fb31-4f2a-8bca-bc7265ff57ea\") " pod="openstack/cinder-scheduler-0" Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.900901 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71af1c1b-fb31-4f2a-8bca-bc7265ff57ea-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"71af1c1b-fb31-4f2a-8bca-bc7265ff57ea\") " pod="openstack/cinder-scheduler-0" Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.903433 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wqmsj\" (UniqueName: \"kubernetes.io/projected/71af1c1b-fb31-4f2a-8bca-bc7265ff57ea-kube-api-access-wqmsj\") pod \"cinder-scheduler-0\" (UID: \"71af1c1b-fb31-4f2a-8bca-bc7265ff57ea\") " pod="openstack/cinder-scheduler-0" Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.930188 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-fgpqc"] Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.978220 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d81e17e-0d6f-461b-8bd9-8d277de96edd-config\") pod \"dnsmasq-dns-6578955fd5-fgpqc\" (UID: \"8d81e17e-0d6f-461b-8bd9-8d277de96edd\") " pod="openstack/dnsmasq-dns-6578955fd5-fgpqc" Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.978349 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8d81e17e-0d6f-461b-8bd9-8d277de96edd-dns-svc\") pod \"dnsmasq-dns-6578955fd5-fgpqc\" (UID: \"8d81e17e-0d6f-461b-8bd9-8d277de96edd\") " pod="openstack/dnsmasq-dns-6578955fd5-fgpqc" Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.978402 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2w82m\" (UniqueName: \"kubernetes.io/projected/8d81e17e-0d6f-461b-8bd9-8d277de96edd-kube-api-access-2w82m\") pod \"dnsmasq-dns-6578955fd5-fgpqc\" (UID: \"8d81e17e-0d6f-461b-8bd9-8d277de96edd\") " pod="openstack/dnsmasq-dns-6578955fd5-fgpqc" Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.978458 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8d81e17e-0d6f-461b-8bd9-8d277de96edd-ovsdbserver-sb\") pod \"dnsmasq-dns-6578955fd5-fgpqc\" (UID: \"8d81e17e-0d6f-461b-8bd9-8d277de96edd\") " pod="openstack/dnsmasq-dns-6578955fd5-fgpqc" Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.978555 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8d81e17e-0d6f-461b-8bd9-8d277de96edd-ovsdbserver-nb\") pod \"dnsmasq-dns-6578955fd5-fgpqc\" (UID: \"8d81e17e-0d6f-461b-8bd9-8d277de96edd\") " pod="openstack/dnsmasq-dns-6578955fd5-fgpqc" Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.978624 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8d81e17e-0d6f-461b-8bd9-8d277de96edd-dns-swift-storage-0\") pod \"dnsmasq-dns-6578955fd5-fgpqc\" (UID: \"8d81e17e-0d6f-461b-8bd9-8d277de96edd\") " 
pod="openstack/dnsmasq-dns-6578955fd5-fgpqc" Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.996969 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 08 21:43:31 crc kubenswrapper[4791]: I1208 21:43:31.998287 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.002222 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.005061 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.016467 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.082253 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d81e17e-0d6f-461b-8bd9-8d277de96edd-config\") pod \"dnsmasq-dns-6578955fd5-fgpqc\" (UID: \"8d81e17e-0d6f-461b-8bd9-8d277de96edd\") " pod="openstack/dnsmasq-dns-6578955fd5-fgpqc" Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.082365 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8d81e17e-0d6f-461b-8bd9-8d277de96edd-dns-svc\") pod \"dnsmasq-dns-6578955fd5-fgpqc\" (UID: \"8d81e17e-0d6f-461b-8bd9-8d277de96edd\") " pod="openstack/dnsmasq-dns-6578955fd5-fgpqc" Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.082416 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2w82m\" (UniqueName: \"kubernetes.io/projected/8d81e17e-0d6f-461b-8bd9-8d277de96edd-kube-api-access-2w82m\") pod \"dnsmasq-dns-6578955fd5-fgpqc\" (UID: \"8d81e17e-0d6f-461b-8bd9-8d277de96edd\") " pod="openstack/dnsmasq-dns-6578955fd5-fgpqc" Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.082467 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8d81e17e-0d6f-461b-8bd9-8d277de96edd-ovsdbserver-sb\") pod \"dnsmasq-dns-6578955fd5-fgpqc\" (UID: \"8d81e17e-0d6f-461b-8bd9-8d277de96edd\") " pod="openstack/dnsmasq-dns-6578955fd5-fgpqc" Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.082554 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8d81e17e-0d6f-461b-8bd9-8d277de96edd-ovsdbserver-nb\") pod \"dnsmasq-dns-6578955fd5-fgpqc\" (UID: \"8d81e17e-0d6f-461b-8bd9-8d277de96edd\") " pod="openstack/dnsmasq-dns-6578955fd5-fgpqc" Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.082609 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8d81e17e-0d6f-461b-8bd9-8d277de96edd-dns-swift-storage-0\") pod \"dnsmasq-dns-6578955fd5-fgpqc\" (UID: \"8d81e17e-0d6f-461b-8bd9-8d277de96edd\") " pod="openstack/dnsmasq-dns-6578955fd5-fgpqc" Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.083351 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d81e17e-0d6f-461b-8bd9-8d277de96edd-config\") pod \"dnsmasq-dns-6578955fd5-fgpqc\" (UID: \"8d81e17e-0d6f-461b-8bd9-8d277de96edd\") " pod="openstack/dnsmasq-dns-6578955fd5-fgpqc" Dec 08 
21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.084320 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8d81e17e-0d6f-461b-8bd9-8d277de96edd-dns-svc\") pod \"dnsmasq-dns-6578955fd5-fgpqc\" (UID: \"8d81e17e-0d6f-461b-8bd9-8d277de96edd\") " pod="openstack/dnsmasq-dns-6578955fd5-fgpqc" Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.084441 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8d81e17e-0d6f-461b-8bd9-8d277de96edd-ovsdbserver-sb\") pod \"dnsmasq-dns-6578955fd5-fgpqc\" (UID: \"8d81e17e-0d6f-461b-8bd9-8d277de96edd\") " pod="openstack/dnsmasq-dns-6578955fd5-fgpqc" Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.084490 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8d81e17e-0d6f-461b-8bd9-8d277de96edd-dns-swift-storage-0\") pod \"dnsmasq-dns-6578955fd5-fgpqc\" (UID: \"8d81e17e-0d6f-461b-8bd9-8d277de96edd\") " pod="openstack/dnsmasq-dns-6578955fd5-fgpqc" Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.084551 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8d81e17e-0d6f-461b-8bd9-8d277de96edd-ovsdbserver-nb\") pod \"dnsmasq-dns-6578955fd5-fgpqc\" (UID: \"8d81e17e-0d6f-461b-8bd9-8d277de96edd\") " pod="openstack/dnsmasq-dns-6578955fd5-fgpqc" Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.137421 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2w82m\" (UniqueName: \"kubernetes.io/projected/8d81e17e-0d6f-461b-8bd9-8d277de96edd-kube-api-access-2w82m\") pod \"dnsmasq-dns-6578955fd5-fgpqc\" (UID: \"8d81e17e-0d6f-461b-8bd9-8d277de96edd\") " pod="openstack/dnsmasq-dns-6578955fd5-fgpqc" Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.188807 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/07cfca53-c71c-4230-a3ae-2878a0cd37dc-scripts\") pod \"cinder-api-0\" (UID: \"07cfca53-c71c-4230-a3ae-2878a0cd37dc\") " pod="openstack/cinder-api-0" Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.189008 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/07cfca53-c71c-4230-a3ae-2878a0cd37dc-config-data\") pod \"cinder-api-0\" (UID: \"07cfca53-c71c-4230-a3ae-2878a0cd37dc\") " pod="openstack/cinder-api-0" Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.189052 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tslr7\" (UniqueName: \"kubernetes.io/projected/07cfca53-c71c-4230-a3ae-2878a0cd37dc-kube-api-access-tslr7\") pod \"cinder-api-0\" (UID: \"07cfca53-c71c-4230-a3ae-2878a0cd37dc\") " pod="openstack/cinder-api-0" Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.189133 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/07cfca53-c71c-4230-a3ae-2878a0cd37dc-etc-machine-id\") pod \"cinder-api-0\" (UID: \"07cfca53-c71c-4230-a3ae-2878a0cd37dc\") " pod="openstack/cinder-api-0" Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.189195 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/07cfca53-c71c-4230-a3ae-2878a0cd37dc-config-data-custom\") pod \"cinder-api-0\" (UID: \"07cfca53-c71c-4230-a3ae-2878a0cd37dc\") " pod="openstack/cinder-api-0" Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.189263 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07cfca53-c71c-4230-a3ae-2878a0cd37dc-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"07cfca53-c71c-4230-a3ae-2878a0cd37dc\") " pod="openstack/cinder-api-0" Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.189294 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/07cfca53-c71c-4230-a3ae-2878a0cd37dc-logs\") pod \"cinder-api-0\" (UID: \"07cfca53-c71c-4230-a3ae-2878a0cd37dc\") " pod="openstack/cinder-api-0" Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.230974 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-fgpqc" Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.291500 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/07cfca53-c71c-4230-a3ae-2878a0cd37dc-etc-machine-id\") pod \"cinder-api-0\" (UID: \"07cfca53-c71c-4230-a3ae-2878a0cd37dc\") " pod="openstack/cinder-api-0" Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.291591 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/07cfca53-c71c-4230-a3ae-2878a0cd37dc-config-data-custom\") pod \"cinder-api-0\" (UID: \"07cfca53-c71c-4230-a3ae-2878a0cd37dc\") " pod="openstack/cinder-api-0" Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.291663 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07cfca53-c71c-4230-a3ae-2878a0cd37dc-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"07cfca53-c71c-4230-a3ae-2878a0cd37dc\") " pod="openstack/cinder-api-0" Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.291700 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/07cfca53-c71c-4230-a3ae-2878a0cd37dc-logs\") pod \"cinder-api-0\" (UID: \"07cfca53-c71c-4230-a3ae-2878a0cd37dc\") " pod="openstack/cinder-api-0" Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.291794 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/07cfca53-c71c-4230-a3ae-2878a0cd37dc-scripts\") pod \"cinder-api-0\" (UID: \"07cfca53-c71c-4230-a3ae-2878a0cd37dc\") " pod="openstack/cinder-api-0" Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.291960 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/07cfca53-c71c-4230-a3ae-2878a0cd37dc-config-data\") pod \"cinder-api-0\" (UID: \"07cfca53-c71c-4230-a3ae-2878a0cd37dc\") " pod="openstack/cinder-api-0" Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.291998 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tslr7\" (UniqueName: \"kubernetes.io/projected/07cfca53-c71c-4230-a3ae-2878a0cd37dc-kube-api-access-tslr7\") pod \"cinder-api-0\" (UID: 
\"07cfca53-c71c-4230-a3ae-2878a0cd37dc\") " pod="openstack/cinder-api-0" Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.294961 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/07cfca53-c71c-4230-a3ae-2878a0cd37dc-logs\") pod \"cinder-api-0\" (UID: \"07cfca53-c71c-4230-a3ae-2878a0cd37dc\") " pod="openstack/cinder-api-0" Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.295102 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/07cfca53-c71c-4230-a3ae-2878a0cd37dc-etc-machine-id\") pod \"cinder-api-0\" (UID: \"07cfca53-c71c-4230-a3ae-2878a0cd37dc\") " pod="openstack/cinder-api-0" Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.298553 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/07cfca53-c71c-4230-a3ae-2878a0cd37dc-config-data-custom\") pod \"cinder-api-0\" (UID: \"07cfca53-c71c-4230-a3ae-2878a0cd37dc\") " pod="openstack/cinder-api-0" Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.299572 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/07cfca53-c71c-4230-a3ae-2878a0cd37dc-config-data\") pod \"cinder-api-0\" (UID: \"07cfca53-c71c-4230-a3ae-2878a0cd37dc\") " pod="openstack/cinder-api-0" Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.300268 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/07cfca53-c71c-4230-a3ae-2878a0cd37dc-scripts\") pod \"cinder-api-0\" (UID: \"07cfca53-c71c-4230-a3ae-2878a0cd37dc\") " pod="openstack/cinder-api-0" Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.304599 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07cfca53-c71c-4230-a3ae-2878a0cd37dc-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"07cfca53-c71c-4230-a3ae-2878a0cd37dc\") " pod="openstack/cinder-api-0" Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.379613 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-848cf88cfc-jm5mk" event={"ID":"7544de62-e997-46e9-aa5b-584bfce301b1","Type":"ContainerStarted","Data":"acf30894035c1f94909be89736fc1f2077e9f2a22b44a815aee73471911e8fa8"} Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.379908 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-848cf88cfc-jm5mk" podUID="7544de62-e997-46e9-aa5b-584bfce301b1" containerName="dnsmasq-dns" containerID="cri-o://acf30894035c1f94909be89736fc1f2077e9f2a22b44a815aee73471911e8fa8" gracePeriod=10 Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.380340 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-848cf88cfc-jm5mk" Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.397690 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5d467568c4-lnvsw" event={"ID":"5e3c537a-85a8-4d90-a23d-8137a6265307","Type":"ContainerStarted","Data":"557b78b5681db765fe65b64239f7cde042162ee824ea3be418861868cf30b11a"} Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.399012 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5d467568c4-lnvsw" Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.399443 4791 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5d467568c4-lnvsw" Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.401329 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tslr7\" (UniqueName: \"kubernetes.io/projected/07cfca53-c71c-4230-a3ae-2878a0cd37dc-kube-api-access-tslr7\") pod \"cinder-api-0\" (UID: \"07cfca53-c71c-4230-a3ae-2878a0cd37dc\") " pod="openstack/cinder-api-0" Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.440837 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-848cf88cfc-jm5mk" podStartSLOduration=3.440814228 podStartE2EDuration="3.440814228s" podCreationTimestamp="2025-12-08 21:43:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:43:32.41605323 +0000 UTC m=+1489.114811585" watchObservedRunningTime="2025-12-08 21:43:32.440814228 +0000 UTC m=+1489.139572573" Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.454156 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-5d467568c4-lnvsw" podStartSLOduration=3.454134926 podStartE2EDuration="3.454134926s" podCreationTimestamp="2025-12-08 21:43:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:43:32.435273882 +0000 UTC m=+1489.134032227" watchObservedRunningTime="2025-12-08 21:43:32.454134926 +0000 UTC m=+1489.152893271" Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.543646 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.594766 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 08 21:43:32 crc kubenswrapper[4791]: W1208 21:43:32.620892 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod71af1c1b_fb31_4f2a_8bca_bc7265ff57ea.slice/crio-4ecc82be2a074a844416a568a32f12a2db2c911fcb9b488eac6484597a06ba16 WatchSource:0}: Error finding container 4ecc82be2a074a844416a568a32f12a2db2c911fcb9b488eac6484597a06ba16: Status 404 returned error can't find the container with id 4ecc82be2a074a844416a568a32f12a2db2c911fcb9b488eac6484597a06ba16 Dec 08 21:43:32 crc kubenswrapper[4791]: I1208 21:43:32.835624 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-fgpqc"] Dec 08 21:43:33 crc kubenswrapper[4791]: W1208 21:43:33.402241 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8d81e17e_0d6f_461b_8bd9_8d277de96edd.slice/crio-b0492c1345a2932fdd3a479cf81fcbea6376237cc4fc7fb76cc6a9230336f0ec WatchSource:0}: Error finding container b0492c1345a2932fdd3a479cf81fcbea6376237cc4fc7fb76cc6a9230336f0ec: Status 404 returned error can't find the container with id b0492c1345a2932fdd3a479cf81fcbea6376237cc4fc7fb76cc6a9230336f0ec Dec 08 21:43:33 crc kubenswrapper[4791]: I1208 21:43:33.416359 4791 generic.go:334] "Generic (PLEG): container finished" podID="7544de62-e997-46e9-aa5b-584bfce301b1" containerID="acf30894035c1f94909be89736fc1f2077e9f2a22b44a815aee73471911e8fa8" exitCode=0 Dec 08 21:43:33 crc kubenswrapper[4791]: I1208 21:43:33.416611 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-848cf88cfc-jm5mk" event={"ID":"7544de62-e997-46e9-aa5b-584bfce301b1","Type":"ContainerDied","Data":"acf30894035c1f94909be89736fc1f2077e9f2a22b44a815aee73471911e8fa8"} Dec 08 21:43:33 crc kubenswrapper[4791]: I1208 21:43:33.419334 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"71af1c1b-fb31-4f2a-8bca-bc7265ff57ea","Type":"ContainerStarted","Data":"4ecc82be2a074a844416a568a32f12a2db2c911fcb9b488eac6484597a06ba16"} Dec 08 21:43:33 crc kubenswrapper[4791]: I1208 21:43:33.524085 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-848cf88cfc-jm5mk" Dec 08 21:43:33 crc kubenswrapper[4791]: I1208 21:43:33.650063 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7544de62-e997-46e9-aa5b-584bfce301b1-dns-swift-storage-0\") pod \"7544de62-e997-46e9-aa5b-584bfce301b1\" (UID: \"7544de62-e997-46e9-aa5b-584bfce301b1\") " Dec 08 21:43:33 crc kubenswrapper[4791]: I1208 21:43:33.650179 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7544de62-e997-46e9-aa5b-584bfce301b1-config\") pod \"7544de62-e997-46e9-aa5b-584bfce301b1\" (UID: \"7544de62-e997-46e9-aa5b-584bfce301b1\") " Dec 08 21:43:33 crc kubenswrapper[4791]: I1208 21:43:33.650269 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wzfk7\" (UniqueName: \"kubernetes.io/projected/7544de62-e997-46e9-aa5b-584bfce301b1-kube-api-access-wzfk7\") pod \"7544de62-e997-46e9-aa5b-584bfce301b1\" (UID: \"7544de62-e997-46e9-aa5b-584bfce301b1\") " Dec 08 21:43:33 crc kubenswrapper[4791]: I1208 21:43:33.650300 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7544de62-e997-46e9-aa5b-584bfce301b1-ovsdbserver-sb\") pod \"7544de62-e997-46e9-aa5b-584bfce301b1\" (UID: \"7544de62-e997-46e9-aa5b-584bfce301b1\") " Dec 08 21:43:33 crc kubenswrapper[4791]: I1208 21:43:33.650403 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7544de62-e997-46e9-aa5b-584bfce301b1-ovsdbserver-nb\") pod \"7544de62-e997-46e9-aa5b-584bfce301b1\" (UID: \"7544de62-e997-46e9-aa5b-584bfce301b1\") " Dec 08 21:43:33 crc kubenswrapper[4791]: I1208 21:43:33.650456 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7544de62-e997-46e9-aa5b-584bfce301b1-dns-svc\") pod \"7544de62-e997-46e9-aa5b-584bfce301b1\" (UID: \"7544de62-e997-46e9-aa5b-584bfce301b1\") " Dec 08 21:43:33 crc kubenswrapper[4791]: I1208 21:43:33.726135 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7544de62-e997-46e9-aa5b-584bfce301b1-kube-api-access-wzfk7" (OuterVolumeSpecName: "kube-api-access-wzfk7") pod "7544de62-e997-46e9-aa5b-584bfce301b1" (UID: "7544de62-e997-46e9-aa5b-584bfce301b1"). InnerVolumeSpecName "kube-api-access-wzfk7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:43:33 crc kubenswrapper[4791]: I1208 21:43:33.753973 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7544de62-e997-46e9-aa5b-584bfce301b1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "7544de62-e997-46e9-aa5b-584bfce301b1" (UID: "7544de62-e997-46e9-aa5b-584bfce301b1"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:43:33 crc kubenswrapper[4791]: I1208 21:43:33.754756 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7544de62-e997-46e9-aa5b-584bfce301b1-ovsdbserver-nb\") pod \"7544de62-e997-46e9-aa5b-584bfce301b1\" (UID: \"7544de62-e997-46e9-aa5b-584bfce301b1\") " Dec 08 21:43:33 crc kubenswrapper[4791]: I1208 21:43:33.755612 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wzfk7\" (UniqueName: \"kubernetes.io/projected/7544de62-e997-46e9-aa5b-584bfce301b1-kube-api-access-wzfk7\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:33 crc kubenswrapper[4791]: W1208 21:43:33.756738 4791 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/7544de62-e997-46e9-aa5b-584bfce301b1/volumes/kubernetes.io~configmap/ovsdbserver-nb Dec 08 21:43:33 crc kubenswrapper[4791]: I1208 21:43:33.756780 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7544de62-e997-46e9-aa5b-584bfce301b1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "7544de62-e997-46e9-aa5b-584bfce301b1" (UID: "7544de62-e997-46e9-aa5b-584bfce301b1"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:43:33 crc kubenswrapper[4791]: I1208 21:43:33.760254 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7544de62-e997-46e9-aa5b-584bfce301b1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7544de62-e997-46e9-aa5b-584bfce301b1" (UID: "7544de62-e997-46e9-aa5b-584bfce301b1"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:43:33 crc kubenswrapper[4791]: I1208 21:43:33.795629 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7544de62-e997-46e9-aa5b-584bfce301b1-config" (OuterVolumeSpecName: "config") pod "7544de62-e997-46e9-aa5b-584bfce301b1" (UID: "7544de62-e997-46e9-aa5b-584bfce301b1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:43:33 crc kubenswrapper[4791]: I1208 21:43:33.804614 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7544de62-e997-46e9-aa5b-584bfce301b1-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "7544de62-e997-46e9-aa5b-584bfce301b1" (UID: "7544de62-e997-46e9-aa5b-584bfce301b1"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:43:33 crc kubenswrapper[4791]: I1208 21:43:33.844615 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7544de62-e997-46e9-aa5b-584bfce301b1-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "7544de62-e997-46e9-aa5b-584bfce301b1" (UID: "7544de62-e997-46e9-aa5b-584bfce301b1"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:43:33 crc kubenswrapper[4791]: I1208 21:43:33.858651 4791 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7544de62-e997-46e9-aa5b-584bfce301b1-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:33 crc kubenswrapper[4791]: I1208 21:43:33.858683 4791 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7544de62-e997-46e9-aa5b-584bfce301b1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:33 crc kubenswrapper[4791]: I1208 21:43:33.858693 4791 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7544de62-e997-46e9-aa5b-584bfce301b1-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:33 crc kubenswrapper[4791]: I1208 21:43:33.858702 4791 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7544de62-e997-46e9-aa5b-584bfce301b1-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:33 crc kubenswrapper[4791]: I1208 21:43:33.858725 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7544de62-e997-46e9-aa5b-584bfce301b1-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:33 crc kubenswrapper[4791]: I1208 21:43:33.911636 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 08 21:43:34 crc kubenswrapper[4791]: I1208 21:43:34.450959 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"07cfca53-c71c-4230-a3ae-2878a0cd37dc","Type":"ContainerStarted","Data":"17b730340990a3b711635a4a5d4bc9b7f00e7d7d5b6d6b023700c9218cd65e92"} Dec 08 21:43:34 crc kubenswrapper[4791]: I1208 21:43:34.453787 4791 generic.go:334] "Generic (PLEG): container finished" podID="8d81e17e-0d6f-461b-8bd9-8d277de96edd" containerID="d03ecd51440fbad4af5d9acedc54533235477560f6e635611fcf91fe8f7f253a" exitCode=0 Dec 08 21:43:34 crc kubenswrapper[4791]: I1208 21:43:34.453914 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-fgpqc" event={"ID":"8d81e17e-0d6f-461b-8bd9-8d277de96edd","Type":"ContainerDied","Data":"d03ecd51440fbad4af5d9acedc54533235477560f6e635611fcf91fe8f7f253a"} Dec 08 21:43:34 crc kubenswrapper[4791]: I1208 21:43:34.453983 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-fgpqc" event={"ID":"8d81e17e-0d6f-461b-8bd9-8d277de96edd","Type":"ContainerStarted","Data":"b0492c1345a2932fdd3a479cf81fcbea6376237cc4fc7fb76cc6a9230336f0ec"} Dec 08 21:43:34 crc kubenswrapper[4791]: I1208 21:43:34.467463 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-848cf88cfc-jm5mk" Dec 08 21:43:34 crc kubenswrapper[4791]: I1208 21:43:34.467861 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-848cf88cfc-jm5mk" event={"ID":"7544de62-e997-46e9-aa5b-584bfce301b1","Type":"ContainerDied","Data":"25c697700da53107c29866e05b86045f72949bb7df09030b25bb2f7421a5ecfb"} Dec 08 21:43:34 crc kubenswrapper[4791]: I1208 21:43:34.467918 4791 scope.go:117] "RemoveContainer" containerID="acf30894035c1f94909be89736fc1f2077e9f2a22b44a815aee73471911e8fa8" Dec 08 21:43:34 crc kubenswrapper[4791]: I1208 21:43:34.526566 4791 scope.go:117] "RemoveContainer" containerID="5489bf143597f1f27f59a1aa41b6d8baf281a22544ee4054d50d13ac20c41b78" Dec 08 21:43:34 crc kubenswrapper[4791]: I1208 21:43:34.538253 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-jm5mk"] Dec 08 21:43:34 crc kubenswrapper[4791]: I1208 21:43:34.559033 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-jm5mk"] Dec 08 21:43:35 crc kubenswrapper[4791]: I1208 21:43:35.480822 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-655599f54b-bjpp6" event={"ID":"85e818fb-c004-4699-86d4-d06e8216ddd3","Type":"ContainerStarted","Data":"e795f81b2006522a360951d3c8570ecd16ba101a7a3b1420ba2d5067f0885a0f"} Dec 08 21:43:35 crc kubenswrapper[4791]: I1208 21:43:35.483952 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-655599f54b-bjpp6" event={"ID":"85e818fb-c004-4699-86d4-d06e8216ddd3","Type":"ContainerStarted","Data":"ce997d0680d7e9cd056ae1984c395a60503d988731c04d11e0ecc5587a31a49b"} Dec 08 21:43:35 crc kubenswrapper[4791]: I1208 21:43:35.486451 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"71af1c1b-fb31-4f2a-8bca-bc7265ff57ea","Type":"ContainerStarted","Data":"ad2969bdddd066afdaa373e86f0a34734fa79b259438b14979d29b478d4c88b0"} Dec 08 21:43:35 crc kubenswrapper[4791]: I1208 21:43:35.489406 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"07cfca53-c71c-4230-a3ae-2878a0cd37dc","Type":"ContainerStarted","Data":"2954d634d80482b990e0d8f5f3c687dabb98e65759113727bb5d478dcd731efa"} Dec 08 21:43:35 crc kubenswrapper[4791]: I1208 21:43:35.491902 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-fgpqc" event={"ID":"8d81e17e-0d6f-461b-8bd9-8d277de96edd","Type":"ContainerStarted","Data":"e231692aaf2208858357bb639ca4381b42ae68294b0ea0dbd48d3790c68d1aa6"} Dec 08 21:43:35 crc kubenswrapper[4791]: I1208 21:43:35.493196 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6578955fd5-fgpqc" Dec 08 21:43:35 crc kubenswrapper[4791]: I1208 21:43:35.496502 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-765b6fcd95-7rp4s" event={"ID":"ae2c49a8-2981-4fa3-a18a-afa91303d23f","Type":"ContainerStarted","Data":"00cb539d7f0af2e0f0fe9e70040f5bb16c85b68fd0cc082762f1c1dfeadbeca0"} Dec 08 21:43:35 crc kubenswrapper[4791]: I1208 21:43:35.496747 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-765b6fcd95-7rp4s" event={"ID":"ae2c49a8-2981-4fa3-a18a-afa91303d23f","Type":"ContainerStarted","Data":"f4710f18aa473187185c05bb1b85b434707a0800eb97b64bf8eddc73f0f3dd21"} Dec 08 21:43:35 crc kubenswrapper[4791]: I1208 21:43:35.503233 4791 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-655599f54b-bjpp6" podStartSLOduration=3.063098161 podStartE2EDuration="6.503206317s" podCreationTimestamp="2025-12-08 21:43:29 +0000 UTC" firstStartedPulling="2025-12-08 21:43:30.722427191 +0000 UTC m=+1487.421185536" lastFinishedPulling="2025-12-08 21:43:34.162535347 +0000 UTC m=+1490.861293692" observedRunningTime="2025-12-08 21:43:35.501061765 +0000 UTC m=+1492.199820110" watchObservedRunningTime="2025-12-08 21:43:35.503206317 +0000 UTC m=+1492.201964662" Dec 08 21:43:35 crc kubenswrapper[4791]: I1208 21:43:35.540082 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6578955fd5-fgpqc" podStartSLOduration=4.540057983 podStartE2EDuration="4.540057983s" podCreationTimestamp="2025-12-08 21:43:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:43:35.525433674 +0000 UTC m=+1492.224192019" watchObservedRunningTime="2025-12-08 21:43:35.540057983 +0000 UTC m=+1492.238816328" Dec 08 21:43:35 crc kubenswrapper[4791]: I1208 21:43:35.556105 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-765b6fcd95-7rp4s" podStartSLOduration=3.602856233 podStartE2EDuration="6.556087628s" podCreationTimestamp="2025-12-08 21:43:29 +0000 UTC" firstStartedPulling="2025-12-08 21:43:31.209315512 +0000 UTC m=+1487.908073857" lastFinishedPulling="2025-12-08 21:43:34.162546907 +0000 UTC m=+1490.861305252" observedRunningTime="2025-12-08 21:43:35.548902951 +0000 UTC m=+1492.247661306" watchObservedRunningTime="2025-12-08 21:43:35.556087628 +0000 UTC m=+1492.254845973" Dec 08 21:43:35 crc kubenswrapper[4791]: I1208 21:43:35.623804 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7544de62-e997-46e9-aa5b-584bfce301b1" path="/var/lib/kubelet/pods/7544de62-e997-46e9-aa5b-584bfce301b1/volumes" Dec 08 21:43:36 crc kubenswrapper[4791]: I1208 21:43:36.020869 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 08 21:43:36 crc kubenswrapper[4791]: I1208 21:43:36.508821 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"71af1c1b-fb31-4f2a-8bca-bc7265ff57ea","Type":"ContainerStarted","Data":"f3d258409642ce92349d81b4dd73c445c7384298480edcbee0aea376d8bc0a3a"} Dec 08 21:43:36 crc kubenswrapper[4791]: I1208 21:43:36.510521 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"07cfca53-c71c-4230-a3ae-2878a0cd37dc","Type":"ContainerStarted","Data":"965ae71f5c81e7bd1fd78904385ac0993c84aa656d88f2155544c9a2064cb6ec"} Dec 08 21:43:36 crc kubenswrapper[4791]: I1208 21:43:36.534209 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.956624689 podStartE2EDuration="5.534192724s" podCreationTimestamp="2025-12-08 21:43:31 +0000 UTC" firstStartedPulling="2025-12-08 21:43:32.62474294 +0000 UTC m=+1489.323501275" lastFinishedPulling="2025-12-08 21:43:34.202310965 +0000 UTC m=+1490.901069310" observedRunningTime="2025-12-08 21:43:36.53364067 +0000 UTC m=+1493.232399015" watchObservedRunningTime="2025-12-08 21:43:36.534192724 +0000 UTC m=+1493.232951069" Dec 08 21:43:36 crc kubenswrapper[4791]: I1208 21:43:36.555849 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" 
podStartSLOduration=5.555822645 podStartE2EDuration="5.555822645s" podCreationTimestamp="2025-12-08 21:43:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:43:36.555093408 +0000 UTC m=+1493.253851753" watchObservedRunningTime="2025-12-08 21:43:36.555822645 +0000 UTC m=+1493.254581000" Dec 08 21:43:36 crc kubenswrapper[4791]: I1208 21:43:36.698879 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-56d9b646cd-6b4tm"] Dec 08 21:43:36 crc kubenswrapper[4791]: E1208 21:43:36.699312 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7544de62-e997-46e9-aa5b-584bfce301b1" containerName="init" Dec 08 21:43:36 crc kubenswrapper[4791]: I1208 21:43:36.699329 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="7544de62-e997-46e9-aa5b-584bfce301b1" containerName="init" Dec 08 21:43:36 crc kubenswrapper[4791]: E1208 21:43:36.699355 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7544de62-e997-46e9-aa5b-584bfce301b1" containerName="dnsmasq-dns" Dec 08 21:43:36 crc kubenswrapper[4791]: I1208 21:43:36.699361 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="7544de62-e997-46e9-aa5b-584bfce301b1" containerName="dnsmasq-dns" Dec 08 21:43:36 crc kubenswrapper[4791]: I1208 21:43:36.699578 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="7544de62-e997-46e9-aa5b-584bfce301b1" containerName="dnsmasq-dns" Dec 08 21:43:36 crc kubenswrapper[4791]: I1208 21:43:36.700779 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-56d9b646cd-6b4tm" Dec 08 21:43:36 crc kubenswrapper[4791]: I1208 21:43:36.702371 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Dec 08 21:43:36 crc kubenswrapper[4791]: I1208 21:43:36.707871 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Dec 08 21:43:36 crc kubenswrapper[4791]: I1208 21:43:36.714864 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-56d9b646cd-6b4tm"] Dec 08 21:43:36 crc kubenswrapper[4791]: I1208 21:43:36.752561 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/46915f99-16c5-4219-83db-3565edb2ea87-logs\") pod \"barbican-api-56d9b646cd-6b4tm\" (UID: \"46915f99-16c5-4219-83db-3565edb2ea87\") " pod="openstack/barbican-api-56d9b646cd-6b4tm" Dec 08 21:43:36 crc kubenswrapper[4791]: I1208 21:43:36.752635 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/46915f99-16c5-4219-83db-3565edb2ea87-config-data-custom\") pod \"barbican-api-56d9b646cd-6b4tm\" (UID: \"46915f99-16c5-4219-83db-3565edb2ea87\") " pod="openstack/barbican-api-56d9b646cd-6b4tm" Dec 08 21:43:36 crc kubenswrapper[4791]: I1208 21:43:36.752686 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tpmk4\" (UniqueName: \"kubernetes.io/projected/46915f99-16c5-4219-83db-3565edb2ea87-kube-api-access-tpmk4\") pod \"barbican-api-56d9b646cd-6b4tm\" (UID: \"46915f99-16c5-4219-83db-3565edb2ea87\") " pod="openstack/barbican-api-56d9b646cd-6b4tm" Dec 08 21:43:36 crc kubenswrapper[4791]: I1208 21:43:36.752743 4791 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46915f99-16c5-4219-83db-3565edb2ea87-combined-ca-bundle\") pod \"barbican-api-56d9b646cd-6b4tm\" (UID: \"46915f99-16c5-4219-83db-3565edb2ea87\") " pod="openstack/barbican-api-56d9b646cd-6b4tm" Dec 08 21:43:36 crc kubenswrapper[4791]: I1208 21:43:36.752787 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/46915f99-16c5-4219-83db-3565edb2ea87-internal-tls-certs\") pod \"barbican-api-56d9b646cd-6b4tm\" (UID: \"46915f99-16c5-4219-83db-3565edb2ea87\") " pod="openstack/barbican-api-56d9b646cd-6b4tm" Dec 08 21:43:36 crc kubenswrapper[4791]: I1208 21:43:36.752818 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/46915f99-16c5-4219-83db-3565edb2ea87-public-tls-certs\") pod \"barbican-api-56d9b646cd-6b4tm\" (UID: \"46915f99-16c5-4219-83db-3565edb2ea87\") " pod="openstack/barbican-api-56d9b646cd-6b4tm" Dec 08 21:43:36 crc kubenswrapper[4791]: I1208 21:43:36.752869 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46915f99-16c5-4219-83db-3565edb2ea87-config-data\") pod \"barbican-api-56d9b646cd-6b4tm\" (UID: \"46915f99-16c5-4219-83db-3565edb2ea87\") " pod="openstack/barbican-api-56d9b646cd-6b4tm" Dec 08 21:43:36 crc kubenswrapper[4791]: I1208 21:43:36.855907 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/46915f99-16c5-4219-83db-3565edb2ea87-config-data-custom\") pod \"barbican-api-56d9b646cd-6b4tm\" (UID: \"46915f99-16c5-4219-83db-3565edb2ea87\") " pod="openstack/barbican-api-56d9b646cd-6b4tm" Dec 08 21:43:36 crc kubenswrapper[4791]: I1208 21:43:36.856359 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tpmk4\" (UniqueName: \"kubernetes.io/projected/46915f99-16c5-4219-83db-3565edb2ea87-kube-api-access-tpmk4\") pod \"barbican-api-56d9b646cd-6b4tm\" (UID: \"46915f99-16c5-4219-83db-3565edb2ea87\") " pod="openstack/barbican-api-56d9b646cd-6b4tm" Dec 08 21:43:36 crc kubenswrapper[4791]: I1208 21:43:36.856440 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46915f99-16c5-4219-83db-3565edb2ea87-combined-ca-bundle\") pod \"barbican-api-56d9b646cd-6b4tm\" (UID: \"46915f99-16c5-4219-83db-3565edb2ea87\") " pod="openstack/barbican-api-56d9b646cd-6b4tm" Dec 08 21:43:36 crc kubenswrapper[4791]: I1208 21:43:36.856511 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/46915f99-16c5-4219-83db-3565edb2ea87-internal-tls-certs\") pod \"barbican-api-56d9b646cd-6b4tm\" (UID: \"46915f99-16c5-4219-83db-3565edb2ea87\") " pod="openstack/barbican-api-56d9b646cd-6b4tm" Dec 08 21:43:36 crc kubenswrapper[4791]: I1208 21:43:36.856559 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/46915f99-16c5-4219-83db-3565edb2ea87-public-tls-certs\") pod \"barbican-api-56d9b646cd-6b4tm\" (UID: \"46915f99-16c5-4219-83db-3565edb2ea87\") " pod="openstack/barbican-api-56d9b646cd-6b4tm" Dec 08 21:43:36 crc 
kubenswrapper[4791]: I1208 21:43:36.856654 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46915f99-16c5-4219-83db-3565edb2ea87-config-data\") pod \"barbican-api-56d9b646cd-6b4tm\" (UID: \"46915f99-16c5-4219-83db-3565edb2ea87\") " pod="openstack/barbican-api-56d9b646cd-6b4tm" Dec 08 21:43:36 crc kubenswrapper[4791]: I1208 21:43:36.857402 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/46915f99-16c5-4219-83db-3565edb2ea87-logs\") pod \"barbican-api-56d9b646cd-6b4tm\" (UID: \"46915f99-16c5-4219-83db-3565edb2ea87\") " pod="openstack/barbican-api-56d9b646cd-6b4tm" Dec 08 21:43:36 crc kubenswrapper[4791]: I1208 21:43:36.857908 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/46915f99-16c5-4219-83db-3565edb2ea87-logs\") pod \"barbican-api-56d9b646cd-6b4tm\" (UID: \"46915f99-16c5-4219-83db-3565edb2ea87\") " pod="openstack/barbican-api-56d9b646cd-6b4tm" Dec 08 21:43:36 crc kubenswrapper[4791]: I1208 21:43:36.862193 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/46915f99-16c5-4219-83db-3565edb2ea87-config-data-custom\") pod \"barbican-api-56d9b646cd-6b4tm\" (UID: \"46915f99-16c5-4219-83db-3565edb2ea87\") " pod="openstack/barbican-api-56d9b646cd-6b4tm" Dec 08 21:43:36 crc kubenswrapper[4791]: I1208 21:43:36.863502 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/46915f99-16c5-4219-83db-3565edb2ea87-internal-tls-certs\") pod \"barbican-api-56d9b646cd-6b4tm\" (UID: \"46915f99-16c5-4219-83db-3565edb2ea87\") " pod="openstack/barbican-api-56d9b646cd-6b4tm" Dec 08 21:43:36 crc kubenswrapper[4791]: I1208 21:43:36.864805 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46915f99-16c5-4219-83db-3565edb2ea87-config-data\") pod \"barbican-api-56d9b646cd-6b4tm\" (UID: \"46915f99-16c5-4219-83db-3565edb2ea87\") " pod="openstack/barbican-api-56d9b646cd-6b4tm" Dec 08 21:43:36 crc kubenswrapper[4791]: I1208 21:43:36.865046 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/46915f99-16c5-4219-83db-3565edb2ea87-public-tls-certs\") pod \"barbican-api-56d9b646cd-6b4tm\" (UID: \"46915f99-16c5-4219-83db-3565edb2ea87\") " pod="openstack/barbican-api-56d9b646cd-6b4tm" Dec 08 21:43:36 crc kubenswrapper[4791]: I1208 21:43:36.873592 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46915f99-16c5-4219-83db-3565edb2ea87-combined-ca-bundle\") pod \"barbican-api-56d9b646cd-6b4tm\" (UID: \"46915f99-16c5-4219-83db-3565edb2ea87\") " pod="openstack/barbican-api-56d9b646cd-6b4tm" Dec 08 21:43:36 crc kubenswrapper[4791]: I1208 21:43:36.877467 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tpmk4\" (UniqueName: \"kubernetes.io/projected/46915f99-16c5-4219-83db-3565edb2ea87-kube-api-access-tpmk4\") pod \"barbican-api-56d9b646cd-6b4tm\" (UID: \"46915f99-16c5-4219-83db-3565edb2ea87\") " pod="openstack/barbican-api-56d9b646cd-6b4tm" Dec 08 21:43:36 crc kubenswrapper[4791]: I1208 21:43:36.999537 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openstack/cinder-scheduler-0" Dec 08 21:43:37 crc kubenswrapper[4791]: I1208 21:43:37.019134 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-56d9b646cd-6b4tm" Dec 08 21:43:37 crc kubenswrapper[4791]: I1208 21:43:37.522532 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="07cfca53-c71c-4230-a3ae-2878a0cd37dc" containerName="cinder-api-log" containerID="cri-o://2954d634d80482b990e0d8f5f3c687dabb98e65759113727bb5d478dcd731efa" gracePeriod=30 Dec 08 21:43:37 crc kubenswrapper[4791]: I1208 21:43:37.524049 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 08 21:43:37 crc kubenswrapper[4791]: I1208 21:43:37.524430 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="07cfca53-c71c-4230-a3ae-2878a0cd37dc" containerName="cinder-api" containerID="cri-o://965ae71f5c81e7bd1fd78904385ac0993c84aa656d88f2155544c9a2064cb6ec" gracePeriod=30 Dec 08 21:43:37 crc kubenswrapper[4791]: I1208 21:43:37.524549 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-56d9b646cd-6b4tm"] Dec 08 21:43:37 crc kubenswrapper[4791]: W1208 21:43:37.525047 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod46915f99_16c5_4219_83db_3565edb2ea87.slice/crio-4f88a24d7390c0dc8e5c16e2fc1fdeac411c4840a2cd1bab9d05fa439845e217 WatchSource:0}: Error finding container 4f88a24d7390c0dc8e5c16e2fc1fdeac411c4840a2cd1bab9d05fa439845e217: Status 404 returned error can't find the container with id 4f88a24d7390c0dc8e5c16e2fc1fdeac411c4840a2cd1bab9d05fa439845e217 Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.391008 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.492093 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/07cfca53-c71c-4230-a3ae-2878a0cd37dc-config-data-custom\") pod \"07cfca53-c71c-4230-a3ae-2878a0cd37dc\" (UID: \"07cfca53-c71c-4230-a3ae-2878a0cd37dc\") " Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.492187 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/07cfca53-c71c-4230-a3ae-2878a0cd37dc-etc-machine-id\") pod \"07cfca53-c71c-4230-a3ae-2878a0cd37dc\" (UID: \"07cfca53-c71c-4230-a3ae-2878a0cd37dc\") " Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.492248 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/07cfca53-c71c-4230-a3ae-2878a0cd37dc-logs\") pod \"07cfca53-c71c-4230-a3ae-2878a0cd37dc\" (UID: \"07cfca53-c71c-4230-a3ae-2878a0cd37dc\") " Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.492268 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/07cfca53-c71c-4230-a3ae-2878a0cd37dc-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "07cfca53-c71c-4230-a3ae-2878a0cd37dc" (UID: "07cfca53-c71c-4230-a3ae-2878a0cd37dc"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.492506 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tslr7\" (UniqueName: \"kubernetes.io/projected/07cfca53-c71c-4230-a3ae-2878a0cd37dc-kube-api-access-tslr7\") pod \"07cfca53-c71c-4230-a3ae-2878a0cd37dc\" (UID: \"07cfca53-c71c-4230-a3ae-2878a0cd37dc\") " Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.492548 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/07cfca53-c71c-4230-a3ae-2878a0cd37dc-config-data\") pod \"07cfca53-c71c-4230-a3ae-2878a0cd37dc\" (UID: \"07cfca53-c71c-4230-a3ae-2878a0cd37dc\") " Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.492585 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07cfca53-c71c-4230-a3ae-2878a0cd37dc-combined-ca-bundle\") pod \"07cfca53-c71c-4230-a3ae-2878a0cd37dc\" (UID: \"07cfca53-c71c-4230-a3ae-2878a0cd37dc\") " Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.492622 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/07cfca53-c71c-4230-a3ae-2878a0cd37dc-scripts\") pod \"07cfca53-c71c-4230-a3ae-2878a0cd37dc\" (UID: \"07cfca53-c71c-4230-a3ae-2878a0cd37dc\") " Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.492882 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/07cfca53-c71c-4230-a3ae-2878a0cd37dc-logs" (OuterVolumeSpecName: "logs") pod "07cfca53-c71c-4230-a3ae-2878a0cd37dc" (UID: "07cfca53-c71c-4230-a3ae-2878a0cd37dc"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.493618 4791 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/07cfca53-c71c-4230-a3ae-2878a0cd37dc-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.493649 4791 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/07cfca53-c71c-4230-a3ae-2878a0cd37dc-logs\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.500527 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07cfca53-c71c-4230-a3ae-2878a0cd37dc-scripts" (OuterVolumeSpecName: "scripts") pod "07cfca53-c71c-4230-a3ae-2878a0cd37dc" (UID: "07cfca53-c71c-4230-a3ae-2878a0cd37dc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.500568 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07cfca53-c71c-4230-a3ae-2878a0cd37dc-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "07cfca53-c71c-4230-a3ae-2878a0cd37dc" (UID: "07cfca53-c71c-4230-a3ae-2878a0cd37dc"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.505986 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/07cfca53-c71c-4230-a3ae-2878a0cd37dc-kube-api-access-tslr7" (OuterVolumeSpecName: "kube-api-access-tslr7") pod "07cfca53-c71c-4230-a3ae-2878a0cd37dc" (UID: "07cfca53-c71c-4230-a3ae-2878a0cd37dc"). InnerVolumeSpecName "kube-api-access-tslr7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.522833 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07cfca53-c71c-4230-a3ae-2878a0cd37dc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "07cfca53-c71c-4230-a3ae-2878a0cd37dc" (UID: "07cfca53-c71c-4230-a3ae-2878a0cd37dc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.548893 4791 generic.go:334] "Generic (PLEG): container finished" podID="07cfca53-c71c-4230-a3ae-2878a0cd37dc" containerID="965ae71f5c81e7bd1fd78904385ac0993c84aa656d88f2155544c9a2064cb6ec" exitCode=0 Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.548940 4791 generic.go:334] "Generic (PLEG): container finished" podID="07cfca53-c71c-4230-a3ae-2878a0cd37dc" containerID="2954d634d80482b990e0d8f5f3c687dabb98e65759113727bb5d478dcd731efa" exitCode=143 Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.548972 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.548983 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"07cfca53-c71c-4230-a3ae-2878a0cd37dc","Type":"ContainerDied","Data":"965ae71f5c81e7bd1fd78904385ac0993c84aa656d88f2155544c9a2064cb6ec"} Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.549034 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"07cfca53-c71c-4230-a3ae-2878a0cd37dc","Type":"ContainerDied","Data":"2954d634d80482b990e0d8f5f3c687dabb98e65759113727bb5d478dcd731efa"} Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.549047 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"07cfca53-c71c-4230-a3ae-2878a0cd37dc","Type":"ContainerDied","Data":"17b730340990a3b711635a4a5d4bc9b7f00e7d7d5b6d6b023700c9218cd65e92"} Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.549057 4791 scope.go:117] "RemoveContainer" containerID="965ae71f5c81e7bd1fd78904385ac0993c84aa656d88f2155544c9a2064cb6ec" Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.552238 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-56d9b646cd-6b4tm" event={"ID":"46915f99-16c5-4219-83db-3565edb2ea87","Type":"ContainerStarted","Data":"f06d22770477e97c303f2af7e39f6b8868c081276118487f9484f9b294c8578b"} Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.552391 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-56d9b646cd-6b4tm" event={"ID":"46915f99-16c5-4219-83db-3565edb2ea87","Type":"ContainerStarted","Data":"d4bb28d1920b255b1385033e4c5a0a5660e00fb25565783823e1a4fa14c9d507"} Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.552417 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-56d9b646cd-6b4tm" 
event={"ID":"46915f99-16c5-4219-83db-3565edb2ea87","Type":"ContainerStarted","Data":"4f88a24d7390c0dc8e5c16e2fc1fdeac411c4840a2cd1bab9d05fa439845e217"} Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.561306 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07cfca53-c71c-4230-a3ae-2878a0cd37dc-config-data" (OuterVolumeSpecName: "config-data") pod "07cfca53-c71c-4230-a3ae-2878a0cd37dc" (UID: "07cfca53-c71c-4230-a3ae-2878a0cd37dc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.584726 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-56d9b646cd-6b4tm" podStartSLOduration=2.584683476 podStartE2EDuration="2.584683476s" podCreationTimestamp="2025-12-08 21:43:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:43:38.580593715 +0000 UTC m=+1495.279352080" watchObservedRunningTime="2025-12-08 21:43:38.584683476 +0000 UTC m=+1495.283441831" Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.595844 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tslr7\" (UniqueName: \"kubernetes.io/projected/07cfca53-c71c-4230-a3ae-2878a0cd37dc-kube-api-access-tslr7\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.596119 4791 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/07cfca53-c71c-4230-a3ae-2878a0cd37dc-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.596152 4791 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07cfca53-c71c-4230-a3ae-2878a0cd37dc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.596167 4791 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/07cfca53-c71c-4230-a3ae-2878a0cd37dc-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.596179 4791 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/07cfca53-c71c-4230-a3ae-2878a0cd37dc-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.660537 4791 scope.go:117] "RemoveContainer" containerID="2954d634d80482b990e0d8f5f3c687dabb98e65759113727bb5d478dcd731efa" Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.685221 4791 scope.go:117] "RemoveContainer" containerID="965ae71f5c81e7bd1fd78904385ac0993c84aa656d88f2155544c9a2064cb6ec" Dec 08 21:43:38 crc kubenswrapper[4791]: E1208 21:43:38.685628 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"965ae71f5c81e7bd1fd78904385ac0993c84aa656d88f2155544c9a2064cb6ec\": container with ID starting with 965ae71f5c81e7bd1fd78904385ac0993c84aa656d88f2155544c9a2064cb6ec not found: ID does not exist" containerID="965ae71f5c81e7bd1fd78904385ac0993c84aa656d88f2155544c9a2064cb6ec" Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.685665 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"965ae71f5c81e7bd1fd78904385ac0993c84aa656d88f2155544c9a2064cb6ec"} err="failed 
to get container status \"965ae71f5c81e7bd1fd78904385ac0993c84aa656d88f2155544c9a2064cb6ec\": rpc error: code = NotFound desc = could not find container \"965ae71f5c81e7bd1fd78904385ac0993c84aa656d88f2155544c9a2064cb6ec\": container with ID starting with 965ae71f5c81e7bd1fd78904385ac0993c84aa656d88f2155544c9a2064cb6ec not found: ID does not exist" Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.685701 4791 scope.go:117] "RemoveContainer" containerID="2954d634d80482b990e0d8f5f3c687dabb98e65759113727bb5d478dcd731efa" Dec 08 21:43:38 crc kubenswrapper[4791]: E1208 21:43:38.686369 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2954d634d80482b990e0d8f5f3c687dabb98e65759113727bb5d478dcd731efa\": container with ID starting with 2954d634d80482b990e0d8f5f3c687dabb98e65759113727bb5d478dcd731efa not found: ID does not exist" containerID="2954d634d80482b990e0d8f5f3c687dabb98e65759113727bb5d478dcd731efa" Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.686423 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2954d634d80482b990e0d8f5f3c687dabb98e65759113727bb5d478dcd731efa"} err="failed to get container status \"2954d634d80482b990e0d8f5f3c687dabb98e65759113727bb5d478dcd731efa\": rpc error: code = NotFound desc = could not find container \"2954d634d80482b990e0d8f5f3c687dabb98e65759113727bb5d478dcd731efa\": container with ID starting with 2954d634d80482b990e0d8f5f3c687dabb98e65759113727bb5d478dcd731efa not found: ID does not exist" Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.686441 4791 scope.go:117] "RemoveContainer" containerID="965ae71f5c81e7bd1fd78904385ac0993c84aa656d88f2155544c9a2064cb6ec" Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.689139 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"965ae71f5c81e7bd1fd78904385ac0993c84aa656d88f2155544c9a2064cb6ec"} err="failed to get container status \"965ae71f5c81e7bd1fd78904385ac0993c84aa656d88f2155544c9a2064cb6ec\": rpc error: code = NotFound desc = could not find container \"965ae71f5c81e7bd1fd78904385ac0993c84aa656d88f2155544c9a2064cb6ec\": container with ID starting with 965ae71f5c81e7bd1fd78904385ac0993c84aa656d88f2155544c9a2064cb6ec not found: ID does not exist" Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.689228 4791 scope.go:117] "RemoveContainer" containerID="2954d634d80482b990e0d8f5f3c687dabb98e65759113727bb5d478dcd731efa" Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.694724 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2954d634d80482b990e0d8f5f3c687dabb98e65759113727bb5d478dcd731efa"} err="failed to get container status \"2954d634d80482b990e0d8f5f3c687dabb98e65759113727bb5d478dcd731efa\": rpc error: code = NotFound desc = could not find container \"2954d634d80482b990e0d8f5f3c687dabb98e65759113727bb5d478dcd731efa\": container with ID starting with 2954d634d80482b990e0d8f5f3c687dabb98e65759113727bb5d478dcd731efa not found: ID does not exist" Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.938691 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.949624 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.967195 4791 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/cinder-api-0"] Dec 08 21:43:38 crc kubenswrapper[4791]: E1208 21:43:38.967769 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07cfca53-c71c-4230-a3ae-2878a0cd37dc" containerName="cinder-api" Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.967790 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="07cfca53-c71c-4230-a3ae-2878a0cd37dc" containerName="cinder-api" Dec 08 21:43:38 crc kubenswrapper[4791]: E1208 21:43:38.967807 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07cfca53-c71c-4230-a3ae-2878a0cd37dc" containerName="cinder-api-log" Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.967814 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="07cfca53-c71c-4230-a3ae-2878a0cd37dc" containerName="cinder-api-log" Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.968015 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="07cfca53-c71c-4230-a3ae-2878a0cd37dc" containerName="cinder-api-log" Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.968036 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="07cfca53-c71c-4230-a3ae-2878a0cd37dc" containerName="cinder-api" Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.969585 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.974682 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.976005 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.976900 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Dec 08 21:43:38 crc kubenswrapper[4791]: I1208 21:43:38.985239 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 08 21:43:39 crc kubenswrapper[4791]: I1208 21:43:39.007058 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/41ccdd19-b7c2-4647-92a2-1b3396777cb7-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"41ccdd19-b7c2-4647-92a2-1b3396777cb7\") " pod="openstack/cinder-api-0" Dec 08 21:43:39 crc kubenswrapper[4791]: I1208 21:43:39.007133 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/41ccdd19-b7c2-4647-92a2-1b3396777cb7-logs\") pod \"cinder-api-0\" (UID: \"41ccdd19-b7c2-4647-92a2-1b3396777cb7\") " pod="openstack/cinder-api-0" Dec 08 21:43:39 crc kubenswrapper[4791]: I1208 21:43:39.007244 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41ccdd19-b7c2-4647-92a2-1b3396777cb7-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"41ccdd19-b7c2-4647-92a2-1b3396777cb7\") " pod="openstack/cinder-api-0" Dec 08 21:43:39 crc kubenswrapper[4791]: I1208 21:43:39.007350 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41ccdd19-b7c2-4647-92a2-1b3396777cb7-config-data\") pod \"cinder-api-0\" (UID: \"41ccdd19-b7c2-4647-92a2-1b3396777cb7\") " pod="openstack/cinder-api-0" Dec 08 21:43:39 crc 
kubenswrapper[4791]: I1208 21:43:39.007397 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kv4gq\" (UniqueName: \"kubernetes.io/projected/41ccdd19-b7c2-4647-92a2-1b3396777cb7-kube-api-access-kv4gq\") pod \"cinder-api-0\" (UID: \"41ccdd19-b7c2-4647-92a2-1b3396777cb7\") " pod="openstack/cinder-api-0" Dec 08 21:43:39 crc kubenswrapper[4791]: I1208 21:43:39.007555 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41ccdd19-b7c2-4647-92a2-1b3396777cb7-scripts\") pod \"cinder-api-0\" (UID: \"41ccdd19-b7c2-4647-92a2-1b3396777cb7\") " pod="openstack/cinder-api-0" Dec 08 21:43:39 crc kubenswrapper[4791]: I1208 21:43:39.007640 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/41ccdd19-b7c2-4647-92a2-1b3396777cb7-config-data-custom\") pod \"cinder-api-0\" (UID: \"41ccdd19-b7c2-4647-92a2-1b3396777cb7\") " pod="openstack/cinder-api-0" Dec 08 21:43:39 crc kubenswrapper[4791]: I1208 21:43:39.007920 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/41ccdd19-b7c2-4647-92a2-1b3396777cb7-public-tls-certs\") pod \"cinder-api-0\" (UID: \"41ccdd19-b7c2-4647-92a2-1b3396777cb7\") " pod="openstack/cinder-api-0" Dec 08 21:43:39 crc kubenswrapper[4791]: I1208 21:43:39.007970 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/41ccdd19-b7c2-4647-92a2-1b3396777cb7-etc-machine-id\") pod \"cinder-api-0\" (UID: \"41ccdd19-b7c2-4647-92a2-1b3396777cb7\") " pod="openstack/cinder-api-0" Dec 08 21:43:39 crc kubenswrapper[4791]: I1208 21:43:39.110317 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/41ccdd19-b7c2-4647-92a2-1b3396777cb7-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"41ccdd19-b7c2-4647-92a2-1b3396777cb7\") " pod="openstack/cinder-api-0" Dec 08 21:43:39 crc kubenswrapper[4791]: I1208 21:43:39.110386 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/41ccdd19-b7c2-4647-92a2-1b3396777cb7-logs\") pod \"cinder-api-0\" (UID: \"41ccdd19-b7c2-4647-92a2-1b3396777cb7\") " pod="openstack/cinder-api-0" Dec 08 21:43:39 crc kubenswrapper[4791]: I1208 21:43:39.110450 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41ccdd19-b7c2-4647-92a2-1b3396777cb7-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"41ccdd19-b7c2-4647-92a2-1b3396777cb7\") " pod="openstack/cinder-api-0" Dec 08 21:43:39 crc kubenswrapper[4791]: I1208 21:43:39.110510 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41ccdd19-b7c2-4647-92a2-1b3396777cb7-config-data\") pod \"cinder-api-0\" (UID: \"41ccdd19-b7c2-4647-92a2-1b3396777cb7\") " pod="openstack/cinder-api-0" Dec 08 21:43:39 crc kubenswrapper[4791]: I1208 21:43:39.110548 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kv4gq\" (UniqueName: \"kubernetes.io/projected/41ccdd19-b7c2-4647-92a2-1b3396777cb7-kube-api-access-kv4gq\") pod 
\"cinder-api-0\" (UID: \"41ccdd19-b7c2-4647-92a2-1b3396777cb7\") " pod="openstack/cinder-api-0" Dec 08 21:43:39 crc kubenswrapper[4791]: I1208 21:43:39.110644 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41ccdd19-b7c2-4647-92a2-1b3396777cb7-scripts\") pod \"cinder-api-0\" (UID: \"41ccdd19-b7c2-4647-92a2-1b3396777cb7\") " pod="openstack/cinder-api-0" Dec 08 21:43:39 crc kubenswrapper[4791]: I1208 21:43:39.110696 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/41ccdd19-b7c2-4647-92a2-1b3396777cb7-config-data-custom\") pod \"cinder-api-0\" (UID: \"41ccdd19-b7c2-4647-92a2-1b3396777cb7\") " pod="openstack/cinder-api-0" Dec 08 21:43:39 crc kubenswrapper[4791]: I1208 21:43:39.110845 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/41ccdd19-b7c2-4647-92a2-1b3396777cb7-public-tls-certs\") pod \"cinder-api-0\" (UID: \"41ccdd19-b7c2-4647-92a2-1b3396777cb7\") " pod="openstack/cinder-api-0" Dec 08 21:43:39 crc kubenswrapper[4791]: I1208 21:43:39.110879 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/41ccdd19-b7c2-4647-92a2-1b3396777cb7-etc-machine-id\") pod \"cinder-api-0\" (UID: \"41ccdd19-b7c2-4647-92a2-1b3396777cb7\") " pod="openstack/cinder-api-0" Dec 08 21:43:39 crc kubenswrapper[4791]: I1208 21:43:39.111041 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/41ccdd19-b7c2-4647-92a2-1b3396777cb7-etc-machine-id\") pod \"cinder-api-0\" (UID: \"41ccdd19-b7c2-4647-92a2-1b3396777cb7\") " pod="openstack/cinder-api-0" Dec 08 21:43:39 crc kubenswrapper[4791]: I1208 21:43:39.111919 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/41ccdd19-b7c2-4647-92a2-1b3396777cb7-logs\") pod \"cinder-api-0\" (UID: \"41ccdd19-b7c2-4647-92a2-1b3396777cb7\") " pod="openstack/cinder-api-0" Dec 08 21:43:39 crc kubenswrapper[4791]: I1208 21:43:39.115579 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/41ccdd19-b7c2-4647-92a2-1b3396777cb7-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"41ccdd19-b7c2-4647-92a2-1b3396777cb7\") " pod="openstack/cinder-api-0" Dec 08 21:43:39 crc kubenswrapper[4791]: I1208 21:43:39.115826 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41ccdd19-b7c2-4647-92a2-1b3396777cb7-config-data\") pod \"cinder-api-0\" (UID: \"41ccdd19-b7c2-4647-92a2-1b3396777cb7\") " pod="openstack/cinder-api-0" Dec 08 21:43:39 crc kubenswrapper[4791]: I1208 21:43:39.116537 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41ccdd19-b7c2-4647-92a2-1b3396777cb7-scripts\") pod \"cinder-api-0\" (UID: \"41ccdd19-b7c2-4647-92a2-1b3396777cb7\") " pod="openstack/cinder-api-0" Dec 08 21:43:39 crc kubenswrapper[4791]: I1208 21:43:39.117242 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/41ccdd19-b7c2-4647-92a2-1b3396777cb7-public-tls-certs\") pod \"cinder-api-0\" (UID: \"41ccdd19-b7c2-4647-92a2-1b3396777cb7\") " 
pod="openstack/cinder-api-0" Dec 08 21:43:39 crc kubenswrapper[4791]: I1208 21:43:39.120368 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41ccdd19-b7c2-4647-92a2-1b3396777cb7-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"41ccdd19-b7c2-4647-92a2-1b3396777cb7\") " pod="openstack/cinder-api-0" Dec 08 21:43:39 crc kubenswrapper[4791]: I1208 21:43:39.120659 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/41ccdd19-b7c2-4647-92a2-1b3396777cb7-config-data-custom\") pod \"cinder-api-0\" (UID: \"41ccdd19-b7c2-4647-92a2-1b3396777cb7\") " pod="openstack/cinder-api-0" Dec 08 21:43:39 crc kubenswrapper[4791]: I1208 21:43:39.130629 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kv4gq\" (UniqueName: \"kubernetes.io/projected/41ccdd19-b7c2-4647-92a2-1b3396777cb7-kube-api-access-kv4gq\") pod \"cinder-api-0\" (UID: \"41ccdd19-b7c2-4647-92a2-1b3396777cb7\") " pod="openstack/cinder-api-0" Dec 08 21:43:39 crc kubenswrapper[4791]: I1208 21:43:39.287836 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 08 21:43:39 crc kubenswrapper[4791]: I1208 21:43:39.575341 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-56d9b646cd-6b4tm" Dec 08 21:43:39 crc kubenswrapper[4791]: I1208 21:43:39.575652 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-56d9b646cd-6b4tm" Dec 08 21:43:39 crc kubenswrapper[4791]: I1208 21:43:39.614571 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="07cfca53-c71c-4230-a3ae-2878a0cd37dc" path="/var/lib/kubelet/pods/07cfca53-c71c-4230-a3ae-2878a0cd37dc/volumes" Dec 08 21:43:39 crc kubenswrapper[4791]: W1208 21:43:39.791670 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod41ccdd19_b7c2_4647_92a2_1b3396777cb7.slice/crio-43f0075e4b9bd8221b2af054d855250f422a112a149ea842a65ac6f3f97696a9 WatchSource:0}: Error finding container 43f0075e4b9bd8221b2af054d855250f422a112a149ea842a65ac6f3f97696a9: Status 404 returned error can't find the container with id 43f0075e4b9bd8221b2af054d855250f422a112a149ea842a65ac6f3f97696a9 Dec 08 21:43:39 crc kubenswrapper[4791]: I1208 21:43:39.801160 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 08 21:43:40 crc kubenswrapper[4791]: I1208 21:43:40.598411 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"41ccdd19-b7c2-4647-92a2-1b3396777cb7","Type":"ContainerStarted","Data":"517fd7623a774fe005b9df4d1e5ed2993614f5d60889ff3bb2a9f12bccb20dd9"} Dec 08 21:43:40 crc kubenswrapper[4791]: I1208 21:43:40.599036 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"41ccdd19-b7c2-4647-92a2-1b3396777cb7","Type":"ContainerStarted","Data":"43f0075e4b9bd8221b2af054d855250f422a112a149ea842a65ac6f3f97696a9"} Dec 08 21:43:41 crc kubenswrapper[4791]: I1208 21:43:41.328751 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-5cd74df676-2lb7z" Dec 08 21:43:41 crc kubenswrapper[4791]: I1208 21:43:41.612362 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" 
event={"ID":"41ccdd19-b7c2-4647-92a2-1b3396777cb7","Type":"ContainerStarted","Data":"dc03890730bc6b898ab57c6994ee20e501d13785591934ca769ae2259af0013b"} Dec 08 21:43:41 crc kubenswrapper[4791]: I1208 21:43:41.612556 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 08 21:43:41 crc kubenswrapper[4791]: I1208 21:43:41.643620 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.6435923690000003 podStartE2EDuration="3.643592369s" podCreationTimestamp="2025-12-08 21:43:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:43:41.643187779 +0000 UTC m=+1498.341946124" watchObservedRunningTime="2025-12-08 21:43:41.643592369 +0000 UTC m=+1498.342350714" Dec 08 21:43:42 crc kubenswrapper[4791]: I1208 21:43:42.232912 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6578955fd5-fgpqc" Dec 08 21:43:42 crc kubenswrapper[4791]: I1208 21:43:42.312210 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-qs5ms"] Dec 08 21:43:42 crc kubenswrapper[4791]: I1208 21:43:42.312544 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6b7b667979-qs5ms" podUID="d442da86-b34f-479a-95ac-71368a15d3f9" containerName="dnsmasq-dns" containerID="cri-o://9417584f4336ff9687fae672a5c073284f152e15963f3ae46bd38e96d4cd4552" gracePeriod=10 Dec 08 21:43:42 crc kubenswrapper[4791]: I1208 21:43:42.564406 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5d467568c4-lnvsw" Dec 08 21:43:42 crc kubenswrapper[4791]: I1208 21:43:42.634825 4791 generic.go:334] "Generic (PLEG): container finished" podID="d442da86-b34f-479a-95ac-71368a15d3f9" containerID="9417584f4336ff9687fae672a5c073284f152e15963f3ae46bd38e96d4cd4552" exitCode=0 Dec 08 21:43:42 crc kubenswrapper[4791]: I1208 21:43:42.634903 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7b667979-qs5ms" event={"ID":"d442da86-b34f-479a-95ac-71368a15d3f9","Type":"ContainerDied","Data":"9417584f4336ff9687fae672a5c073284f152e15963f3ae46bd38e96d4cd4552"} Dec 08 21:43:42 crc kubenswrapper[4791]: I1208 21:43:42.668581 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 08 21:43:42 crc kubenswrapper[4791]: I1208 21:43:42.726383 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 08 21:43:43 crc kubenswrapper[4791]: I1208 21:43:43.179152 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5d467568c4-lnvsw" Dec 08 21:43:43 crc kubenswrapper[4791]: I1208 21:43:43.335006 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6b7b667979-qs5ms" Dec 08 21:43:43 crc kubenswrapper[4791]: I1208 21:43:43.378157 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d442da86-b34f-479a-95ac-71368a15d3f9-dns-swift-storage-0\") pod \"d442da86-b34f-479a-95ac-71368a15d3f9\" (UID: \"d442da86-b34f-479a-95ac-71368a15d3f9\") " Dec 08 21:43:43 crc kubenswrapper[4791]: I1208 21:43:43.378443 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d442da86-b34f-479a-95ac-71368a15d3f9-dns-svc\") pod \"d442da86-b34f-479a-95ac-71368a15d3f9\" (UID: \"d442da86-b34f-479a-95ac-71368a15d3f9\") " Dec 08 21:43:43 crc kubenswrapper[4791]: I1208 21:43:43.378650 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d442da86-b34f-479a-95ac-71368a15d3f9-ovsdbserver-sb\") pod \"d442da86-b34f-479a-95ac-71368a15d3f9\" (UID: \"d442da86-b34f-479a-95ac-71368a15d3f9\") " Dec 08 21:43:43 crc kubenswrapper[4791]: I1208 21:43:43.378776 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d442da86-b34f-479a-95ac-71368a15d3f9-config\") pod \"d442da86-b34f-479a-95ac-71368a15d3f9\" (UID: \"d442da86-b34f-479a-95ac-71368a15d3f9\") " Dec 08 21:43:43 crc kubenswrapper[4791]: I1208 21:43:43.379106 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pzmcb\" (UniqueName: \"kubernetes.io/projected/d442da86-b34f-479a-95ac-71368a15d3f9-kube-api-access-pzmcb\") pod \"d442da86-b34f-479a-95ac-71368a15d3f9\" (UID: \"d442da86-b34f-479a-95ac-71368a15d3f9\") " Dec 08 21:43:43 crc kubenswrapper[4791]: I1208 21:43:43.379202 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d442da86-b34f-479a-95ac-71368a15d3f9-ovsdbserver-nb\") pod \"d442da86-b34f-479a-95ac-71368a15d3f9\" (UID: \"d442da86-b34f-479a-95ac-71368a15d3f9\") " Dec 08 21:43:43 crc kubenswrapper[4791]: I1208 21:43:43.394244 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d442da86-b34f-479a-95ac-71368a15d3f9-kube-api-access-pzmcb" (OuterVolumeSpecName: "kube-api-access-pzmcb") pod "d442da86-b34f-479a-95ac-71368a15d3f9" (UID: "d442da86-b34f-479a-95ac-71368a15d3f9"). InnerVolumeSpecName "kube-api-access-pzmcb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:43:43 crc kubenswrapper[4791]: I1208 21:43:43.460465 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d442da86-b34f-479a-95ac-71368a15d3f9-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "d442da86-b34f-479a-95ac-71368a15d3f9" (UID: "d442da86-b34f-479a-95ac-71368a15d3f9"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:43:43 crc kubenswrapper[4791]: I1208 21:43:43.482254 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pzmcb\" (UniqueName: \"kubernetes.io/projected/d442da86-b34f-479a-95ac-71368a15d3f9-kube-api-access-pzmcb\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:43 crc kubenswrapper[4791]: I1208 21:43:43.482285 4791 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d442da86-b34f-479a-95ac-71368a15d3f9-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:43 crc kubenswrapper[4791]: I1208 21:43:43.499112 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d442da86-b34f-479a-95ac-71368a15d3f9-config" (OuterVolumeSpecName: "config") pod "d442da86-b34f-479a-95ac-71368a15d3f9" (UID: "d442da86-b34f-479a-95ac-71368a15d3f9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:43:43 crc kubenswrapper[4791]: I1208 21:43:43.509214 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d442da86-b34f-479a-95ac-71368a15d3f9-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d442da86-b34f-479a-95ac-71368a15d3f9" (UID: "d442da86-b34f-479a-95ac-71368a15d3f9"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:43:43 crc kubenswrapper[4791]: I1208 21:43:43.523933 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d442da86-b34f-479a-95ac-71368a15d3f9-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d442da86-b34f-479a-95ac-71368a15d3f9" (UID: "d442da86-b34f-479a-95ac-71368a15d3f9"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:43:43 crc kubenswrapper[4791]: I1208 21:43:43.549278 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d442da86-b34f-479a-95ac-71368a15d3f9-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d442da86-b34f-479a-95ac-71368a15d3f9" (UID: "d442da86-b34f-479a-95ac-71368a15d3f9"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:43:43 crc kubenswrapper[4791]: I1208 21:43:43.589225 4791 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d442da86-b34f-479a-95ac-71368a15d3f9-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:43 crc kubenswrapper[4791]: I1208 21:43:43.589266 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d442da86-b34f-479a-95ac-71368a15d3f9-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:43 crc kubenswrapper[4791]: I1208 21:43:43.589280 4791 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d442da86-b34f-479a-95ac-71368a15d3f9-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:43 crc kubenswrapper[4791]: I1208 21:43:43.589291 4791 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d442da86-b34f-479a-95ac-71368a15d3f9-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:43 crc kubenswrapper[4791]: I1208 21:43:43.663266 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6b7b667979-qs5ms" Dec 08 21:43:43 crc kubenswrapper[4791]: I1208 21:43:43.663360 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="71af1c1b-fb31-4f2a-8bca-bc7265ff57ea" containerName="cinder-scheduler" containerID="cri-o://ad2969bdddd066afdaa373e86f0a34734fa79b259438b14979d29b478d4c88b0" gracePeriod=30 Dec 08 21:43:43 crc kubenswrapper[4791]: I1208 21:43:43.663608 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7b667979-qs5ms" event={"ID":"d442da86-b34f-479a-95ac-71368a15d3f9","Type":"ContainerDied","Data":"60af05e3c97541c558dbb3bc842e3cc3a9254fe592674e0235258556438e4142"} Dec 08 21:43:43 crc kubenswrapper[4791]: I1208 21:43:43.663984 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="71af1c1b-fb31-4f2a-8bca-bc7265ff57ea" containerName="probe" containerID="cri-o://f3d258409642ce92349d81b4dd73c445c7384298480edcbee0aea376d8bc0a3a" gracePeriod=30 Dec 08 21:43:43 crc kubenswrapper[4791]: I1208 21:43:43.665335 4791 scope.go:117] "RemoveContainer" containerID="9417584f4336ff9687fae672a5c073284f152e15963f3ae46bd38e96d4cd4552" Dec 08 21:43:43 crc kubenswrapper[4791]: I1208 21:43:43.704398 4791 scope.go:117] "RemoveContainer" containerID="0c64f70188c18c717bc664ed05a6c3a67ef3c17b6cf7d49731ddcb785e92fe2d" Dec 08 21:43:43 crc kubenswrapper[4791]: I1208 21:43:43.706253 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-947b647f-l42kf" Dec 08 21:43:43 crc kubenswrapper[4791]: I1208 21:43:43.711781 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-qs5ms"] Dec 08 21:43:43 crc kubenswrapper[4791]: I1208 21:43:43.721957 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-qs5ms"] Dec 08 21:43:43 crc kubenswrapper[4791]: I1208 21:43:43.835028 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-5cd74df676-2lb7z"] Dec 08 21:43:43 crc kubenswrapper[4791]: I1208 21:43:43.835400 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-5cd74df676-2lb7z" podUID="d752bdcc-7c7f-4b73-9052-02a6495248d8" containerName="neutron-api" containerID="cri-o://77ed06f8bfcec88de46e9e7bc3a7d244ea8997488ff0fa1e9321ec155412c494" gracePeriod=30 Dec 08 21:43:43 crc kubenswrapper[4791]: I1208 21:43:43.835926 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-5cd74df676-2lb7z" podUID="d752bdcc-7c7f-4b73-9052-02a6495248d8" containerName="neutron-httpd" containerID="cri-o://5536c16bac48a9a56d408aa81894ad58d27d0443620da195ca65cc6940dce891" gracePeriod=30 Dec 08 21:43:44 crc kubenswrapper[4791]: I1208 21:43:44.370547 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-56d9b646cd-6b4tm" Dec 08 21:43:44 crc kubenswrapper[4791]: I1208 21:43:44.674349 4791 generic.go:334] "Generic (PLEG): container finished" podID="71af1c1b-fb31-4f2a-8bca-bc7265ff57ea" containerID="f3d258409642ce92349d81b4dd73c445c7384298480edcbee0aea376d8bc0a3a" exitCode=0 Dec 08 21:43:44 crc kubenswrapper[4791]: I1208 21:43:44.674740 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"71af1c1b-fb31-4f2a-8bca-bc7265ff57ea","Type":"ContainerDied","Data":"f3d258409642ce92349d81b4dd73c445c7384298480edcbee0aea376d8bc0a3a"} 
Dec 08 21:43:44 crc kubenswrapper[4791]: I1208 21:43:44.675927 4791 generic.go:334] "Generic (PLEG): container finished" podID="d752bdcc-7c7f-4b73-9052-02a6495248d8" containerID="5536c16bac48a9a56d408aa81894ad58d27d0443620da195ca65cc6940dce891" exitCode=0 Dec 08 21:43:44 crc kubenswrapper[4791]: I1208 21:43:44.675968 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5cd74df676-2lb7z" event={"ID":"d752bdcc-7c7f-4b73-9052-02a6495248d8","Type":"ContainerDied","Data":"5536c16bac48a9a56d408aa81894ad58d27d0443620da195ca65cc6940dce891"} Dec 08 21:43:45 crc kubenswrapper[4791]: I1208 21:43:45.610815 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d442da86-b34f-479a-95ac-71368a15d3f9" path="/var/lib/kubelet/pods/d442da86-b34f-479a-95ac-71368a15d3f9/volumes" Dec 08 21:43:45 crc kubenswrapper[4791]: I1208 21:43:45.909901 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-56d9b646cd-6b4tm" Dec 08 21:43:45 crc kubenswrapper[4791]: I1208 21:43:45.986653 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5d467568c4-lnvsw"] Dec 08 21:43:45 crc kubenswrapper[4791]: I1208 21:43:45.987237 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5d467568c4-lnvsw" podUID="5e3c537a-85a8-4d90-a23d-8137a6265307" containerName="barbican-api-log" containerID="cri-o://a75adf3dad44bff2bce5138cb475aa2ec92bd233a5607798e43a2850af056053" gracePeriod=30 Dec 08 21:43:45 crc kubenswrapper[4791]: I1208 21:43:45.987384 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5d467568c4-lnvsw" podUID="5e3c537a-85a8-4d90-a23d-8137a6265307" containerName="barbican-api" containerID="cri-o://557b78b5681db765fe65b64239f7cde042162ee824ea3be418861868cf30b11a" gracePeriod=30 Dec 08 21:43:46 crc kubenswrapper[4791]: I1208 21:43:46.708449 4791 generic.go:334] "Generic (PLEG): container finished" podID="71af1c1b-fb31-4f2a-8bca-bc7265ff57ea" containerID="ad2969bdddd066afdaa373e86f0a34734fa79b259438b14979d29b478d4c88b0" exitCode=0 Dec 08 21:43:46 crc kubenswrapper[4791]: I1208 21:43:46.708507 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"71af1c1b-fb31-4f2a-8bca-bc7265ff57ea","Type":"ContainerDied","Data":"ad2969bdddd066afdaa373e86f0a34734fa79b259438b14979d29b478d4c88b0"} Dec 08 21:43:46 crc kubenswrapper[4791]: I1208 21:43:46.711222 4791 generic.go:334] "Generic (PLEG): container finished" podID="5e3c537a-85a8-4d90-a23d-8137a6265307" containerID="a75adf3dad44bff2bce5138cb475aa2ec92bd233a5607798e43a2850af056053" exitCode=143 Dec 08 21:43:46 crc kubenswrapper[4791]: I1208 21:43:46.711245 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5d467568c4-lnvsw" event={"ID":"5e3c537a-85a8-4d90-a23d-8137a6265307","Type":"ContainerDied","Data":"a75adf3dad44bff2bce5138cb475aa2ec92bd233a5607798e43a2850af056053"} Dec 08 21:43:47 crc kubenswrapper[4791]: I1208 21:43:47.204734 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 08 21:43:47 crc kubenswrapper[4791]: I1208 21:43:47.289601 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/71af1c1b-fb31-4f2a-8bca-bc7265ff57ea-scripts\") pod \"71af1c1b-fb31-4f2a-8bca-bc7265ff57ea\" (UID: \"71af1c1b-fb31-4f2a-8bca-bc7265ff57ea\") " Dec 08 21:43:47 crc kubenswrapper[4791]: I1208 21:43:47.289648 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71af1c1b-fb31-4f2a-8bca-bc7265ff57ea-combined-ca-bundle\") pod \"71af1c1b-fb31-4f2a-8bca-bc7265ff57ea\" (UID: \"71af1c1b-fb31-4f2a-8bca-bc7265ff57ea\") " Dec 08 21:43:47 crc kubenswrapper[4791]: I1208 21:43:47.289861 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/71af1c1b-fb31-4f2a-8bca-bc7265ff57ea-config-data-custom\") pod \"71af1c1b-fb31-4f2a-8bca-bc7265ff57ea\" (UID: \"71af1c1b-fb31-4f2a-8bca-bc7265ff57ea\") " Dec 08 21:43:47 crc kubenswrapper[4791]: I1208 21:43:47.289896 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/71af1c1b-fb31-4f2a-8bca-bc7265ff57ea-etc-machine-id\") pod \"71af1c1b-fb31-4f2a-8bca-bc7265ff57ea\" (UID: \"71af1c1b-fb31-4f2a-8bca-bc7265ff57ea\") " Dec 08 21:43:47 crc kubenswrapper[4791]: I1208 21:43:47.289949 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wqmsj\" (UniqueName: \"kubernetes.io/projected/71af1c1b-fb31-4f2a-8bca-bc7265ff57ea-kube-api-access-wqmsj\") pod \"71af1c1b-fb31-4f2a-8bca-bc7265ff57ea\" (UID: \"71af1c1b-fb31-4f2a-8bca-bc7265ff57ea\") " Dec 08 21:43:47 crc kubenswrapper[4791]: I1208 21:43:47.290053 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/71af1c1b-fb31-4f2a-8bca-bc7265ff57ea-config-data\") pod \"71af1c1b-fb31-4f2a-8bca-bc7265ff57ea\" (UID: \"71af1c1b-fb31-4f2a-8bca-bc7265ff57ea\") " Dec 08 21:43:47 crc kubenswrapper[4791]: I1208 21:43:47.290136 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/71af1c1b-fb31-4f2a-8bca-bc7265ff57ea-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "71af1c1b-fb31-4f2a-8bca-bc7265ff57ea" (UID: "71af1c1b-fb31-4f2a-8bca-bc7265ff57ea"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:43:47 crc kubenswrapper[4791]: I1208 21:43:47.290783 4791 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/71af1c1b-fb31-4f2a-8bca-bc7265ff57ea-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:47 crc kubenswrapper[4791]: I1208 21:43:47.295848 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/71af1c1b-fb31-4f2a-8bca-bc7265ff57ea-kube-api-access-wqmsj" (OuterVolumeSpecName: "kube-api-access-wqmsj") pod "71af1c1b-fb31-4f2a-8bca-bc7265ff57ea" (UID: "71af1c1b-fb31-4f2a-8bca-bc7265ff57ea"). InnerVolumeSpecName "kube-api-access-wqmsj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:43:47 crc kubenswrapper[4791]: I1208 21:43:47.296970 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71af1c1b-fb31-4f2a-8bca-bc7265ff57ea-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "71af1c1b-fb31-4f2a-8bca-bc7265ff57ea" (UID: "71af1c1b-fb31-4f2a-8bca-bc7265ff57ea"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:47 crc kubenswrapper[4791]: I1208 21:43:47.319502 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71af1c1b-fb31-4f2a-8bca-bc7265ff57ea-scripts" (OuterVolumeSpecName: "scripts") pod "71af1c1b-fb31-4f2a-8bca-bc7265ff57ea" (UID: "71af1c1b-fb31-4f2a-8bca-bc7265ff57ea"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:47 crc kubenswrapper[4791]: I1208 21:43:47.371059 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71af1c1b-fb31-4f2a-8bca-bc7265ff57ea-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "71af1c1b-fb31-4f2a-8bca-bc7265ff57ea" (UID: "71af1c1b-fb31-4f2a-8bca-bc7265ff57ea"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:47 crc kubenswrapper[4791]: I1208 21:43:47.393369 4791 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/71af1c1b-fb31-4f2a-8bca-bc7265ff57ea-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:47 crc kubenswrapper[4791]: I1208 21:43:47.393405 4791 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71af1c1b-fb31-4f2a-8bca-bc7265ff57ea-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:47 crc kubenswrapper[4791]: I1208 21:43:47.393414 4791 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/71af1c1b-fb31-4f2a-8bca-bc7265ff57ea-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:47 crc kubenswrapper[4791]: I1208 21:43:47.393425 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wqmsj\" (UniqueName: \"kubernetes.io/projected/71af1c1b-fb31-4f2a-8bca-bc7265ff57ea-kube-api-access-wqmsj\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:47 crc kubenswrapper[4791]: I1208 21:43:47.435868 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71af1c1b-fb31-4f2a-8bca-bc7265ff57ea-config-data" (OuterVolumeSpecName: "config-data") pod "71af1c1b-fb31-4f2a-8bca-bc7265ff57ea" (UID: "71af1c1b-fb31-4f2a-8bca-bc7265ff57ea"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:47 crc kubenswrapper[4791]: I1208 21:43:47.496032 4791 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/71af1c1b-fb31-4f2a-8bca-bc7265ff57ea-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:47 crc kubenswrapper[4791]: I1208 21:43:47.732772 4791 generic.go:334] "Generic (PLEG): container finished" podID="d752bdcc-7c7f-4b73-9052-02a6495248d8" containerID="77ed06f8bfcec88de46e9e7bc3a7d244ea8997488ff0fa1e9321ec155412c494" exitCode=0 Dec 08 21:43:47 crc kubenswrapper[4791]: I1208 21:43:47.732860 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5cd74df676-2lb7z" event={"ID":"d752bdcc-7c7f-4b73-9052-02a6495248d8","Type":"ContainerDied","Data":"77ed06f8bfcec88de46e9e7bc3a7d244ea8997488ff0fa1e9321ec155412c494"} Dec 08 21:43:47 crc kubenswrapper[4791]: I1208 21:43:47.749322 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"71af1c1b-fb31-4f2a-8bca-bc7265ff57ea","Type":"ContainerDied","Data":"4ecc82be2a074a844416a568a32f12a2db2c911fcb9b488eac6484597a06ba16"} Dec 08 21:43:47 crc kubenswrapper[4791]: I1208 21:43:47.749375 4791 scope.go:117] "RemoveContainer" containerID="f3d258409642ce92349d81b4dd73c445c7384298480edcbee0aea376d8bc0a3a" Dec 08 21:43:47 crc kubenswrapper[4791]: I1208 21:43:47.749598 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 08 21:43:47 crc kubenswrapper[4791]: I1208 21:43:47.784134 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 08 21:43:47 crc kubenswrapper[4791]: I1208 21:43:47.788321 4791 scope.go:117] "RemoveContainer" containerID="ad2969bdddd066afdaa373e86f0a34734fa79b259438b14979d29b478d4c88b0" Dec 08 21:43:47 crc kubenswrapper[4791]: I1208 21:43:47.794296 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 08 21:43:47 crc kubenswrapper[4791]: I1208 21:43:47.826788 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 08 21:43:47 crc kubenswrapper[4791]: E1208 21:43:47.827452 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71af1c1b-fb31-4f2a-8bca-bc7265ff57ea" containerName="probe" Dec 08 21:43:47 crc kubenswrapper[4791]: I1208 21:43:47.827483 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="71af1c1b-fb31-4f2a-8bca-bc7265ff57ea" containerName="probe" Dec 08 21:43:47 crc kubenswrapper[4791]: E1208 21:43:47.827542 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d442da86-b34f-479a-95ac-71368a15d3f9" containerName="init" Dec 08 21:43:47 crc kubenswrapper[4791]: I1208 21:43:47.827554 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="d442da86-b34f-479a-95ac-71368a15d3f9" containerName="init" Dec 08 21:43:47 crc kubenswrapper[4791]: E1208 21:43:47.827571 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d442da86-b34f-479a-95ac-71368a15d3f9" containerName="dnsmasq-dns" Dec 08 21:43:47 crc kubenswrapper[4791]: I1208 21:43:47.827581 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="d442da86-b34f-479a-95ac-71368a15d3f9" containerName="dnsmasq-dns" Dec 08 21:43:47 crc kubenswrapper[4791]: E1208 21:43:47.827601 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71af1c1b-fb31-4f2a-8bca-bc7265ff57ea" containerName="cinder-scheduler" Dec 08 21:43:47 crc 
kubenswrapper[4791]: I1208 21:43:47.827609 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="71af1c1b-fb31-4f2a-8bca-bc7265ff57ea" containerName="cinder-scheduler" Dec 08 21:43:47 crc kubenswrapper[4791]: I1208 21:43:47.827908 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="d442da86-b34f-479a-95ac-71368a15d3f9" containerName="dnsmasq-dns" Dec 08 21:43:47 crc kubenswrapper[4791]: I1208 21:43:47.827937 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="71af1c1b-fb31-4f2a-8bca-bc7265ff57ea" containerName="cinder-scheduler" Dec 08 21:43:47 crc kubenswrapper[4791]: I1208 21:43:47.827967 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="71af1c1b-fb31-4f2a-8bca-bc7265ff57ea" containerName="probe" Dec 08 21:43:47 crc kubenswrapper[4791]: I1208 21:43:47.829701 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 08 21:43:47 crc kubenswrapper[4791]: I1208 21:43:47.833610 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 08 21:43:47 crc kubenswrapper[4791]: I1208 21:43:47.840197 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 08 21:43:47 crc kubenswrapper[4791]: I1208 21:43:47.938107 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c020e24e-53b8-4042-a8b6-18cf852464a3-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"c020e24e-53b8-4042-a8b6-18cf852464a3\") " pod="openstack/cinder-scheduler-0" Dec 08 21:43:47 crc kubenswrapper[4791]: I1208 21:43:47.938947 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c020e24e-53b8-4042-a8b6-18cf852464a3-scripts\") pod \"cinder-scheduler-0\" (UID: \"c020e24e-53b8-4042-a8b6-18cf852464a3\") " pod="openstack/cinder-scheduler-0" Dec 08 21:43:47 crc kubenswrapper[4791]: I1208 21:43:47.938975 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g4tvr\" (UniqueName: \"kubernetes.io/projected/c020e24e-53b8-4042-a8b6-18cf852464a3-kube-api-access-g4tvr\") pod \"cinder-scheduler-0\" (UID: \"c020e24e-53b8-4042-a8b6-18cf852464a3\") " pod="openstack/cinder-scheduler-0" Dec 08 21:43:47 crc kubenswrapper[4791]: I1208 21:43:47.939120 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c020e24e-53b8-4042-a8b6-18cf852464a3-config-data\") pod \"cinder-scheduler-0\" (UID: \"c020e24e-53b8-4042-a8b6-18cf852464a3\") " pod="openstack/cinder-scheduler-0" Dec 08 21:43:47 crc kubenswrapper[4791]: I1208 21:43:47.939177 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c020e24e-53b8-4042-a8b6-18cf852464a3-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"c020e24e-53b8-4042-a8b6-18cf852464a3\") " pod="openstack/cinder-scheduler-0" Dec 08 21:43:47 crc kubenswrapper[4791]: I1208 21:43:47.939261 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c020e24e-53b8-4042-a8b6-18cf852464a3-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: 
\"c020e24e-53b8-4042-a8b6-18cf852464a3\") " pod="openstack/cinder-scheduler-0" Dec 08 21:43:48 crc kubenswrapper[4791]: I1208 21:43:48.042734 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c020e24e-53b8-4042-a8b6-18cf852464a3-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"c020e24e-53b8-4042-a8b6-18cf852464a3\") " pod="openstack/cinder-scheduler-0" Dec 08 21:43:48 crc kubenswrapper[4791]: I1208 21:43:48.042826 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c020e24e-53b8-4042-a8b6-18cf852464a3-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"c020e24e-53b8-4042-a8b6-18cf852464a3\") " pod="openstack/cinder-scheduler-0" Dec 08 21:43:48 crc kubenswrapper[4791]: I1208 21:43:48.043030 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c020e24e-53b8-4042-a8b6-18cf852464a3-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"c020e24e-53b8-4042-a8b6-18cf852464a3\") " pod="openstack/cinder-scheduler-0" Dec 08 21:43:48 crc kubenswrapper[4791]: I1208 21:43:48.043145 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c020e24e-53b8-4042-a8b6-18cf852464a3-scripts\") pod \"cinder-scheduler-0\" (UID: \"c020e24e-53b8-4042-a8b6-18cf852464a3\") " pod="openstack/cinder-scheduler-0" Dec 08 21:43:48 crc kubenswrapper[4791]: I1208 21:43:48.043163 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c020e24e-53b8-4042-a8b6-18cf852464a3-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"c020e24e-53b8-4042-a8b6-18cf852464a3\") " pod="openstack/cinder-scheduler-0" Dec 08 21:43:48 crc kubenswrapper[4791]: I1208 21:43:48.043176 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g4tvr\" (UniqueName: \"kubernetes.io/projected/c020e24e-53b8-4042-a8b6-18cf852464a3-kube-api-access-g4tvr\") pod \"cinder-scheduler-0\" (UID: \"c020e24e-53b8-4042-a8b6-18cf852464a3\") " pod="openstack/cinder-scheduler-0" Dec 08 21:43:48 crc kubenswrapper[4791]: I1208 21:43:48.043244 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c020e24e-53b8-4042-a8b6-18cf852464a3-config-data\") pod \"cinder-scheduler-0\" (UID: \"c020e24e-53b8-4042-a8b6-18cf852464a3\") " pod="openstack/cinder-scheduler-0" Dec 08 21:43:48 crc kubenswrapper[4791]: I1208 21:43:48.051218 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c020e24e-53b8-4042-a8b6-18cf852464a3-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"c020e24e-53b8-4042-a8b6-18cf852464a3\") " pod="openstack/cinder-scheduler-0" Dec 08 21:43:48 crc kubenswrapper[4791]: I1208 21:43:48.052445 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c020e24e-53b8-4042-a8b6-18cf852464a3-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"c020e24e-53b8-4042-a8b6-18cf852464a3\") " pod="openstack/cinder-scheduler-0" Dec 08 21:43:48 crc kubenswrapper[4791]: I1208 21:43:48.055816 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/c020e24e-53b8-4042-a8b6-18cf852464a3-config-data\") pod \"cinder-scheduler-0\" (UID: \"c020e24e-53b8-4042-a8b6-18cf852464a3\") " pod="openstack/cinder-scheduler-0" Dec 08 21:43:48 crc kubenswrapper[4791]: I1208 21:43:48.062070 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g4tvr\" (UniqueName: \"kubernetes.io/projected/c020e24e-53b8-4042-a8b6-18cf852464a3-kube-api-access-g4tvr\") pod \"cinder-scheduler-0\" (UID: \"c020e24e-53b8-4042-a8b6-18cf852464a3\") " pod="openstack/cinder-scheduler-0" Dec 08 21:43:48 crc kubenswrapper[4791]: I1208 21:43:48.077246 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c020e24e-53b8-4042-a8b6-18cf852464a3-scripts\") pod \"cinder-scheduler-0\" (UID: \"c020e24e-53b8-4042-a8b6-18cf852464a3\") " pod="openstack/cinder-scheduler-0" Dec 08 21:43:48 crc kubenswrapper[4791]: I1208 21:43:48.159964 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 08 21:43:48 crc kubenswrapper[4791]: I1208 21:43:48.209967 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5cd74df676-2lb7z" Dec 08 21:43:48 crc kubenswrapper[4791]: I1208 21:43:48.351443 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2552t\" (UniqueName: \"kubernetes.io/projected/d752bdcc-7c7f-4b73-9052-02a6495248d8-kube-api-access-2552t\") pod \"d752bdcc-7c7f-4b73-9052-02a6495248d8\" (UID: \"d752bdcc-7c7f-4b73-9052-02a6495248d8\") " Dec 08 21:43:48 crc kubenswrapper[4791]: I1208 21:43:48.351832 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/d752bdcc-7c7f-4b73-9052-02a6495248d8-httpd-config\") pod \"d752bdcc-7c7f-4b73-9052-02a6495248d8\" (UID: \"d752bdcc-7c7f-4b73-9052-02a6495248d8\") " Dec 08 21:43:48 crc kubenswrapper[4791]: I1208 21:43:48.351894 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d752bdcc-7c7f-4b73-9052-02a6495248d8-config\") pod \"d752bdcc-7c7f-4b73-9052-02a6495248d8\" (UID: \"d752bdcc-7c7f-4b73-9052-02a6495248d8\") " Dec 08 21:43:48 crc kubenswrapper[4791]: I1208 21:43:48.351999 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d752bdcc-7c7f-4b73-9052-02a6495248d8-ovndb-tls-certs\") pod \"d752bdcc-7c7f-4b73-9052-02a6495248d8\" (UID: \"d752bdcc-7c7f-4b73-9052-02a6495248d8\") " Dec 08 21:43:48 crc kubenswrapper[4791]: I1208 21:43:48.352025 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d752bdcc-7c7f-4b73-9052-02a6495248d8-combined-ca-bundle\") pod \"d752bdcc-7c7f-4b73-9052-02a6495248d8\" (UID: \"d752bdcc-7c7f-4b73-9052-02a6495248d8\") " Dec 08 21:43:48 crc kubenswrapper[4791]: I1208 21:43:48.372918 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d752bdcc-7c7f-4b73-9052-02a6495248d8-kube-api-access-2552t" (OuterVolumeSpecName: "kube-api-access-2552t") pod "d752bdcc-7c7f-4b73-9052-02a6495248d8" (UID: "d752bdcc-7c7f-4b73-9052-02a6495248d8"). InnerVolumeSpecName "kube-api-access-2552t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:43:48 crc kubenswrapper[4791]: I1208 21:43:48.376873 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d752bdcc-7c7f-4b73-9052-02a6495248d8-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "d752bdcc-7c7f-4b73-9052-02a6495248d8" (UID: "d752bdcc-7c7f-4b73-9052-02a6495248d8"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:48 crc kubenswrapper[4791]: I1208 21:43:48.432831 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d752bdcc-7c7f-4b73-9052-02a6495248d8-config" (OuterVolumeSpecName: "config") pod "d752bdcc-7c7f-4b73-9052-02a6495248d8" (UID: "d752bdcc-7c7f-4b73-9052-02a6495248d8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:48 crc kubenswrapper[4791]: I1208 21:43:48.444940 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d752bdcc-7c7f-4b73-9052-02a6495248d8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d752bdcc-7c7f-4b73-9052-02a6495248d8" (UID: "d752bdcc-7c7f-4b73-9052-02a6495248d8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:48 crc kubenswrapper[4791]: I1208 21:43:48.457392 4791 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d752bdcc-7c7f-4b73-9052-02a6495248d8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:48 crc kubenswrapper[4791]: I1208 21:43:48.457430 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2552t\" (UniqueName: \"kubernetes.io/projected/d752bdcc-7c7f-4b73-9052-02a6495248d8-kube-api-access-2552t\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:48 crc kubenswrapper[4791]: I1208 21:43:48.457444 4791 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/d752bdcc-7c7f-4b73-9052-02a6495248d8-httpd-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:48 crc kubenswrapper[4791]: I1208 21:43:48.457454 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/d752bdcc-7c7f-4b73-9052-02a6495248d8-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:48 crc kubenswrapper[4791]: I1208 21:43:48.489226 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d752bdcc-7c7f-4b73-9052-02a6495248d8-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "d752bdcc-7c7f-4b73-9052-02a6495248d8" (UID: "d752bdcc-7c7f-4b73-9052-02a6495248d8"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:48 crc kubenswrapper[4791]: I1208 21:43:48.559777 4791 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d752bdcc-7c7f-4b73-9052-02a6495248d8-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:48 crc kubenswrapper[4791]: I1208 21:43:48.676283 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 08 21:43:48 crc kubenswrapper[4791]: I1208 21:43:48.781682 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5cd74df676-2lb7z" event={"ID":"d752bdcc-7c7f-4b73-9052-02a6495248d8","Type":"ContainerDied","Data":"b38fa60d5d677e6d5deb9a62e7734d0f59f6b1d771718a07842760bc602c2ca4"} Dec 08 21:43:48 crc kubenswrapper[4791]: I1208 21:43:48.781760 4791 scope.go:117] "RemoveContainer" containerID="5536c16bac48a9a56d408aa81894ad58d27d0443620da195ca65cc6940dce891" Dec 08 21:43:48 crc kubenswrapper[4791]: I1208 21:43:48.781890 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5cd74df676-2lb7z" Dec 08 21:43:48 crc kubenswrapper[4791]: I1208 21:43:48.799733 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"c020e24e-53b8-4042-a8b6-18cf852464a3","Type":"ContainerStarted","Data":"ed00f3c15d929dd1179c07a652fe0e7a4396ab8a4121a2e310eb4ef43877c7d2"} Dec 08 21:43:48 crc kubenswrapper[4791]: I1208 21:43:48.824872 4791 scope.go:117] "RemoveContainer" containerID="77ed06f8bfcec88de46e9e7bc3a7d244ea8997488ff0fa1e9321ec155412c494" Dec 08 21:43:48 crc kubenswrapper[4791]: I1208 21:43:48.840566 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-5cd74df676-2lb7z"] Dec 08 21:43:48 crc kubenswrapper[4791]: I1208 21:43:48.849348 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-5cd74df676-2lb7z"] Dec 08 21:43:49 crc kubenswrapper[4791]: E1208 21:43:49.351772 4791 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod71af1c1b_fb31_4f2a_8bca_bc7265ff57ea.slice/crio-4ecc82be2a074a844416a568a32f12a2db2c911fcb9b488eac6484597a06ba16\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod71af1c1b_fb31_4f2a_8bca_bc7265ff57ea.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd752bdcc_7c7f_4b73_9052_02a6495248d8.slice/crio-77ed06f8bfcec88de46e9e7bc3a7d244ea8997488ff0fa1e9321ec155412c494.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd752bdcc_7c7f_4b73_9052_02a6495248d8.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5e3c537a_85a8_4d90_a23d_8137a6265307.slice/crio-conmon-557b78b5681db765fe65b64239f7cde042162ee824ea3be418861868cf30b11a.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5e3c537a_85a8_4d90_a23d_8137a6265307.slice/crio-557b78b5681db765fe65b64239f7cde042162ee824ea3be418861868cf30b11a.scope\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd752bdcc_7c7f_4b73_9052_02a6495248d8.slice/crio-conmon-77ed06f8bfcec88de46e9e7bc3a7d244ea8997488ff0fa1e9321ec155412c494.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd752bdcc_7c7f_4b73_9052_02a6495248d8.slice/crio-b38fa60d5d677e6d5deb9a62e7734d0f59f6b1d771718a07842760bc602c2ca4\": RecentStats: unable to find data in memory cache]" Dec 08 21:43:49 crc kubenswrapper[4791]: E1208 21:43:49.352174 4791 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd752bdcc_7c7f_4b73_9052_02a6495248d8.slice/crio-conmon-77ed06f8bfcec88de46e9e7bc3a7d244ea8997488ff0fa1e9321ec155412c494.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod71af1c1b_fb31_4f2a_8bca_bc7265ff57ea.slice/crio-4ecc82be2a074a844416a568a32f12a2db2c911fcb9b488eac6484597a06ba16\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod71af1c1b_fb31_4f2a_8bca_bc7265ff57ea.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5e3c537a_85a8_4d90_a23d_8137a6265307.slice/crio-557b78b5681db765fe65b64239f7cde042162ee824ea3be418861868cf30b11a.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd752bdcc_7c7f_4b73_9052_02a6495248d8.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd752bdcc_7c7f_4b73_9052_02a6495248d8.slice/crio-77ed06f8bfcec88de46e9e7bc3a7d244ea8997488ff0fa1e9321ec155412c494.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd752bdcc_7c7f_4b73_9052_02a6495248d8.slice/crio-b38fa60d5d677e6d5deb9a62e7734d0f59f6b1d771718a07842760bc602c2ca4\": RecentStats: unable to find data in memory cache]" Dec 08 21:43:49 crc kubenswrapper[4791]: E1208 21:43:49.354962 4791 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5e3c537a_85a8_4d90_a23d_8137a6265307.slice/crio-conmon-557b78b5681db765fe65b64239f7cde042162ee824ea3be418861868cf30b11a.scope\": RecentStats: unable to find data in memory cache]" Dec 08 21:43:49 crc kubenswrapper[4791]: I1208 21:43:49.463767 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-76fcb88b6d-mffmn" Dec 08 21:43:49 crc kubenswrapper[4791]: I1208 21:43:49.523633 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-76fcb88b6d-mffmn" Dec 08 21:43:49 crc kubenswrapper[4791]: I1208 21:43:49.646690 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="71af1c1b-fb31-4f2a-8bca-bc7265ff57ea" path="/var/lib/kubelet/pods/71af1c1b-fb31-4f2a-8bca-bc7265ff57ea/volumes" Dec 08 21:43:49 crc kubenswrapper[4791]: I1208 21:43:49.647939 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d752bdcc-7c7f-4b73-9052-02a6495248d8" path="/var/lib/kubelet/pods/d752bdcc-7c7f-4b73-9052-02a6495248d8/volumes" Dec 08 21:43:49 crc kubenswrapper[4791]: I1208 21:43:49.835728 4791 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"c020e24e-53b8-4042-a8b6-18cf852464a3","Type":"ContainerStarted","Data":"293ec27a878ef35b9aff64ae055cd26c0bc3462ae26b54b486d7cd1d202e6f9c"} Dec 08 21:43:49 crc kubenswrapper[4791]: I1208 21:43:49.853094 4791 generic.go:334] "Generic (PLEG): container finished" podID="5e3c537a-85a8-4d90-a23d-8137a6265307" containerID="557b78b5681db765fe65b64239f7cde042162ee824ea3be418861868cf30b11a" exitCode=0 Dec 08 21:43:49 crc kubenswrapper[4791]: I1208 21:43:49.853193 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5d467568c4-lnvsw" event={"ID":"5e3c537a-85a8-4d90-a23d-8137a6265307","Type":"ContainerDied","Data":"557b78b5681db765fe65b64239f7cde042162ee824ea3be418861868cf30b11a"} Dec 08 21:43:50 crc kubenswrapper[4791]: I1208 21:43:50.264359 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5d467568c4-lnvsw" Dec 08 21:43:50 crc kubenswrapper[4791]: I1208 21:43:50.445266 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5e3c537a-85a8-4d90-a23d-8137a6265307-logs\") pod \"5e3c537a-85a8-4d90-a23d-8137a6265307\" (UID: \"5e3c537a-85a8-4d90-a23d-8137a6265307\") " Dec 08 21:43:50 crc kubenswrapper[4791]: I1208 21:43:50.445543 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x28hw\" (UniqueName: \"kubernetes.io/projected/5e3c537a-85a8-4d90-a23d-8137a6265307-kube-api-access-x28hw\") pod \"5e3c537a-85a8-4d90-a23d-8137a6265307\" (UID: \"5e3c537a-85a8-4d90-a23d-8137a6265307\") " Dec 08 21:43:50 crc kubenswrapper[4791]: I1208 21:43:50.445627 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5e3c537a-85a8-4d90-a23d-8137a6265307-config-data-custom\") pod \"5e3c537a-85a8-4d90-a23d-8137a6265307\" (UID: \"5e3c537a-85a8-4d90-a23d-8137a6265307\") " Dec 08 21:43:50 crc kubenswrapper[4791]: I1208 21:43:50.445750 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e3c537a-85a8-4d90-a23d-8137a6265307-config-data\") pod \"5e3c537a-85a8-4d90-a23d-8137a6265307\" (UID: \"5e3c537a-85a8-4d90-a23d-8137a6265307\") " Dec 08 21:43:50 crc kubenswrapper[4791]: I1208 21:43:50.446080 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5e3c537a-85a8-4d90-a23d-8137a6265307-logs" (OuterVolumeSpecName: "logs") pod "5e3c537a-85a8-4d90-a23d-8137a6265307" (UID: "5e3c537a-85a8-4d90-a23d-8137a6265307"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:43:50 crc kubenswrapper[4791]: I1208 21:43:50.447134 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e3c537a-85a8-4d90-a23d-8137a6265307-combined-ca-bundle\") pod \"5e3c537a-85a8-4d90-a23d-8137a6265307\" (UID: \"5e3c537a-85a8-4d90-a23d-8137a6265307\") " Dec 08 21:43:50 crc kubenswrapper[4791]: I1208 21:43:50.448057 4791 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5e3c537a-85a8-4d90-a23d-8137a6265307-logs\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:50 crc kubenswrapper[4791]: I1208 21:43:50.453786 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e3c537a-85a8-4d90-a23d-8137a6265307-kube-api-access-x28hw" (OuterVolumeSpecName: "kube-api-access-x28hw") pod "5e3c537a-85a8-4d90-a23d-8137a6265307" (UID: "5e3c537a-85a8-4d90-a23d-8137a6265307"). InnerVolumeSpecName "kube-api-access-x28hw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:43:50 crc kubenswrapper[4791]: I1208 21:43:50.456006 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e3c537a-85a8-4d90-a23d-8137a6265307-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "5e3c537a-85a8-4d90-a23d-8137a6265307" (UID: "5e3c537a-85a8-4d90-a23d-8137a6265307"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:50 crc kubenswrapper[4791]: I1208 21:43:50.489075 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e3c537a-85a8-4d90-a23d-8137a6265307-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5e3c537a-85a8-4d90-a23d-8137a6265307" (UID: "5e3c537a-85a8-4d90-a23d-8137a6265307"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:50 crc kubenswrapper[4791]: I1208 21:43:50.517921 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e3c537a-85a8-4d90-a23d-8137a6265307-config-data" (OuterVolumeSpecName: "config-data") pod "5e3c537a-85a8-4d90-a23d-8137a6265307" (UID: "5e3c537a-85a8-4d90-a23d-8137a6265307"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:50 crc kubenswrapper[4791]: I1208 21:43:50.549949 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x28hw\" (UniqueName: \"kubernetes.io/projected/5e3c537a-85a8-4d90-a23d-8137a6265307-kube-api-access-x28hw\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:50 crc kubenswrapper[4791]: I1208 21:43:50.549998 4791 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5e3c537a-85a8-4d90-a23d-8137a6265307-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:50 crc kubenswrapper[4791]: I1208 21:43:50.550010 4791 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e3c537a-85a8-4d90-a23d-8137a6265307-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:50 crc kubenswrapper[4791]: I1208 21:43:50.550020 4791 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e3c537a-85a8-4d90-a23d-8137a6265307-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:50 crc kubenswrapper[4791]: I1208 21:43:50.685315 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-5fd957fc96-br4ld" Dec 08 21:43:50 crc kubenswrapper[4791]: I1208 21:43:50.866641 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"c020e24e-53b8-4042-a8b6-18cf852464a3","Type":"ContainerStarted","Data":"924f98f91f928a9bf09388b1b7adb76023a360f20ed93bd1dd1e2c54ce07a684"} Dec 08 21:43:50 crc kubenswrapper[4791]: I1208 21:43:50.871128 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5d467568c4-lnvsw" event={"ID":"5e3c537a-85a8-4d90-a23d-8137a6265307","Type":"ContainerDied","Data":"c1dd823abdaf4375c04976f2bd5fe242a0c76bcdd291561858ac0bfc141400da"} Dec 08 21:43:50 crc kubenswrapper[4791]: I1208 21:43:50.871191 4791 scope.go:117] "RemoveContainer" containerID="557b78b5681db765fe65b64239f7cde042162ee824ea3be418861868cf30b11a" Dec 08 21:43:50 crc kubenswrapper[4791]: I1208 21:43:50.871355 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5d467568c4-lnvsw" Dec 08 21:43:50 crc kubenswrapper[4791]: I1208 21:43:50.902065 4791 scope.go:117] "RemoveContainer" containerID="a75adf3dad44bff2bce5138cb475aa2ec92bd233a5607798e43a2850af056053" Dec 08 21:43:50 crc kubenswrapper[4791]: I1208 21:43:50.924417 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.924374098 podStartE2EDuration="3.924374098s" podCreationTimestamp="2025-12-08 21:43:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:43:50.896184365 +0000 UTC m=+1507.594942710" watchObservedRunningTime="2025-12-08 21:43:50.924374098 +0000 UTC m=+1507.623132443" Dec 08 21:43:50 crc kubenswrapper[4791]: I1208 21:43:50.942592 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5d467568c4-lnvsw"] Dec 08 21:43:50 crc kubenswrapper[4791]: I1208 21:43:50.954308 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-5d467568c4-lnvsw"] Dec 08 21:43:51 crc kubenswrapper[4791]: I1208 21:43:51.611452 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e3c537a-85a8-4d90-a23d-8137a6265307" path="/var/lib/kubelet/pods/5e3c537a-85a8-4d90-a23d-8137a6265307/volumes" Dec 08 21:43:51 crc kubenswrapper[4791]: I1208 21:43:51.835813 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 08 21:43:53 crc kubenswrapper[4791]: I1208 21:43:53.160124 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.084308 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5d467568c4-lnvsw" podUID="5e3c537a-85a8-4d90-a23d-8137a6265307" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.187:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.084351 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5d467568c4-lnvsw" podUID="5e3c537a-85a8-4d90-a23d-8137a6265307" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.187:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.461284 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-868dn"] Dec 08 21:43:55 crc kubenswrapper[4791]: E1208 21:43:55.461750 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e3c537a-85a8-4d90-a23d-8137a6265307" containerName="barbican-api" Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.461763 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e3c537a-85a8-4d90-a23d-8137a6265307" containerName="barbican-api" Dec 08 21:43:55 crc kubenswrapper[4791]: E1208 21:43:55.461784 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d752bdcc-7c7f-4b73-9052-02a6495248d8" containerName="neutron-api" Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.461790 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="d752bdcc-7c7f-4b73-9052-02a6495248d8" containerName="neutron-api" Dec 08 21:43:55 crc kubenswrapper[4791]: E1208 21:43:55.461816 4791 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="d752bdcc-7c7f-4b73-9052-02a6495248d8" containerName="neutron-httpd" Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.461825 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="d752bdcc-7c7f-4b73-9052-02a6495248d8" containerName="neutron-httpd" Dec 08 21:43:55 crc kubenswrapper[4791]: E1208 21:43:55.461867 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e3c537a-85a8-4d90-a23d-8137a6265307" containerName="barbican-api-log" Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.461874 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e3c537a-85a8-4d90-a23d-8137a6265307" containerName="barbican-api-log" Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.462067 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="d752bdcc-7c7f-4b73-9052-02a6495248d8" containerName="neutron-httpd" Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.462084 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e3c537a-85a8-4d90-a23d-8137a6265307" containerName="barbican-api-log" Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.462100 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e3c537a-85a8-4d90-a23d-8137a6265307" containerName="barbican-api" Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.462113 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="d752bdcc-7c7f-4b73-9052-02a6495248d8" containerName="neutron-api" Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.462859 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-868dn" Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.485632 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-868dn"] Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.564658 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-7j742"] Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.567748 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-7j742" Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.577474 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-7j742"] Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.588068 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-222ms\" (UniqueName: \"kubernetes.io/projected/cc095445-ea14-4648-9198-f86b355ec210-kube-api-access-222ms\") pod \"nova-api-db-create-868dn\" (UID: \"cc095445-ea14-4648-9198-f86b355ec210\") " pod="openstack/nova-api-db-create-868dn" Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.588195 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cc095445-ea14-4648-9198-f86b355ec210-operator-scripts\") pod \"nova-api-db-create-868dn\" (UID: \"cc095445-ea14-4648-9198-f86b355ec210\") " pod="openstack/nova-api-db-create-868dn" Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.672637 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-ncw6l"] Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.674961 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-ncw6l" Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.688208 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-8be1-account-create-update-k6gx9"] Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.689863 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-8be1-account-create-update-k6gx9" Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.690010 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cc095445-ea14-4648-9198-f86b355ec210-operator-scripts\") pod \"nova-api-db-create-868dn\" (UID: \"cc095445-ea14-4648-9198-f86b355ec210\") " pod="openstack/nova-api-db-create-868dn" Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.690109 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmvqf\" (UniqueName: \"kubernetes.io/projected/32971ca2-45d0-455e-9bd3-3452c7d044e0-kube-api-access-xmvqf\") pod \"nova-cell1-db-create-ncw6l\" (UID: \"32971ca2-45d0-455e-9bd3-3452c7d044e0\") " pod="openstack/nova-cell1-db-create-ncw6l" Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.690186 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/32971ca2-45d0-455e-9bd3-3452c7d044e0-operator-scripts\") pod \"nova-cell1-db-create-ncw6l\" (UID: \"32971ca2-45d0-455e-9bd3-3452c7d044e0\") " pod="openstack/nova-cell1-db-create-ncw6l" Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.690204 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q6fp5\" (UniqueName: \"kubernetes.io/projected/5ffcf886-dc11-49ff-9ab5-ee93d739852e-kube-api-access-q6fp5\") pod \"nova-cell0-db-create-7j742\" (UID: \"5ffcf886-dc11-49ff-9ab5-ee93d739852e\") " pod="openstack/nova-cell0-db-create-7j742" Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.690278 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-222ms\" (UniqueName: \"kubernetes.io/projected/cc095445-ea14-4648-9198-f86b355ec210-kube-api-access-222ms\") pod \"nova-api-db-create-868dn\" (UID: \"cc095445-ea14-4648-9198-f86b355ec210\") " pod="openstack/nova-api-db-create-868dn" Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.690318 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5ffcf886-dc11-49ff-9ab5-ee93d739852e-operator-scripts\") pod \"nova-cell0-db-create-7j742\" (UID: \"5ffcf886-dc11-49ff-9ab5-ee93d739852e\") " pod="openstack/nova-cell0-db-create-7j742" Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.692536 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cc095445-ea14-4648-9198-f86b355ec210-operator-scripts\") pod \"nova-api-db-create-868dn\" (UID: \"cc095445-ea14-4648-9198-f86b355ec210\") " pod="openstack/nova-api-db-create-868dn" Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.700091 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.701371 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/nova-cell1-db-create-ncw6l"] Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.713235 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-8be1-account-create-update-k6gx9"] Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.721020 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-222ms\" (UniqueName: \"kubernetes.io/projected/cc095445-ea14-4648-9198-f86b355ec210-kube-api-access-222ms\") pod \"nova-api-db-create-868dn\" (UID: \"cc095445-ea14-4648-9198-f86b355ec210\") " pod="openstack/nova-api-db-create-868dn" Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.792787 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5ffcf886-dc11-49ff-9ab5-ee93d739852e-operator-scripts\") pod \"nova-cell0-db-create-7j742\" (UID: \"5ffcf886-dc11-49ff-9ab5-ee93d739852e\") " pod="openstack/nova-cell0-db-create-7j742" Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.793572 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5ffcf886-dc11-49ff-9ab5-ee93d739852e-operator-scripts\") pod \"nova-cell0-db-create-7j742\" (UID: \"5ffcf886-dc11-49ff-9ab5-ee93d739852e\") " pod="openstack/nova-cell0-db-create-7j742" Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.794200 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xmvqf\" (UniqueName: \"kubernetes.io/projected/32971ca2-45d0-455e-9bd3-3452c7d044e0-kube-api-access-xmvqf\") pod \"nova-cell1-db-create-ncw6l\" (UID: \"32971ca2-45d0-455e-9bd3-3452c7d044e0\") " pod="openstack/nova-cell1-db-create-ncw6l" Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.794353 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/32971ca2-45d0-455e-9bd3-3452c7d044e0-operator-scripts\") pod \"nova-cell1-db-create-ncw6l\" (UID: \"32971ca2-45d0-455e-9bd3-3452c7d044e0\") " pod="openstack/nova-cell1-db-create-ncw6l" Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.794435 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q6fp5\" (UniqueName: \"kubernetes.io/projected/5ffcf886-dc11-49ff-9ab5-ee93d739852e-kube-api-access-q6fp5\") pod \"nova-cell0-db-create-7j742\" (UID: \"5ffcf886-dc11-49ff-9ab5-ee93d739852e\") " pod="openstack/nova-cell0-db-create-7j742" Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.794552 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tz42n\" (UniqueName: \"kubernetes.io/projected/9a9c9a16-208d-4c67-bdb5-8300013965ff-kube-api-access-tz42n\") pod \"nova-api-8be1-account-create-update-k6gx9\" (UID: \"9a9c9a16-208d-4c67-bdb5-8300013965ff\") " pod="openstack/nova-api-8be1-account-create-update-k6gx9" Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.794673 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9a9c9a16-208d-4c67-bdb5-8300013965ff-operator-scripts\") pod \"nova-api-8be1-account-create-update-k6gx9\" (UID: \"9a9c9a16-208d-4c67-bdb5-8300013965ff\") " pod="openstack/nova-api-8be1-account-create-update-k6gx9" Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.795238 4791 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/32971ca2-45d0-455e-9bd3-3452c7d044e0-operator-scripts\") pod \"nova-cell1-db-create-ncw6l\" (UID: \"32971ca2-45d0-455e-9bd3-3452c7d044e0\") " pod="openstack/nova-cell1-db-create-ncw6l" Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.798526 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-868dn" Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.813468 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xmvqf\" (UniqueName: \"kubernetes.io/projected/32971ca2-45d0-455e-9bd3-3452c7d044e0-kube-api-access-xmvqf\") pod \"nova-cell1-db-create-ncw6l\" (UID: \"32971ca2-45d0-455e-9bd3-3452c7d044e0\") " pod="openstack/nova-cell1-db-create-ncw6l" Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.815475 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q6fp5\" (UniqueName: \"kubernetes.io/projected/5ffcf886-dc11-49ff-9ab5-ee93d739852e-kube-api-access-q6fp5\") pod \"nova-cell0-db-create-7j742\" (UID: \"5ffcf886-dc11-49ff-9ab5-ee93d739852e\") " pod="openstack/nova-cell0-db-create-7j742" Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.891398 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-7j742" Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.898018 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tz42n\" (UniqueName: \"kubernetes.io/projected/9a9c9a16-208d-4c67-bdb5-8300013965ff-kube-api-access-tz42n\") pod \"nova-api-8be1-account-create-update-k6gx9\" (UID: \"9a9c9a16-208d-4c67-bdb5-8300013965ff\") " pod="openstack/nova-api-8be1-account-create-update-k6gx9" Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.898067 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9a9c9a16-208d-4c67-bdb5-8300013965ff-operator-scripts\") pod \"nova-api-8be1-account-create-update-k6gx9\" (UID: \"9a9c9a16-208d-4c67-bdb5-8300013965ff\") " pod="openstack/nova-api-8be1-account-create-update-k6gx9" Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.907483 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9a9c9a16-208d-4c67-bdb5-8300013965ff-operator-scripts\") pod \"nova-api-8be1-account-create-update-k6gx9\" (UID: \"9a9c9a16-208d-4c67-bdb5-8300013965ff\") " pod="openstack/nova-api-8be1-account-create-update-k6gx9" Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.907605 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-f316-account-create-update-pwkqh"] Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.909487 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-f316-account-create-update-pwkqh" Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.918204 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.946457 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tz42n\" (UniqueName: \"kubernetes.io/projected/9a9c9a16-208d-4c67-bdb5-8300013965ff-kube-api-access-tz42n\") pod \"nova-api-8be1-account-create-update-k6gx9\" (UID: \"9a9c9a16-208d-4c67-bdb5-8300013965ff\") " pod="openstack/nova-api-8be1-account-create-update-k6gx9" Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.974985 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-f316-account-create-update-pwkqh"] Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.993760 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 08 21:43:55 crc kubenswrapper[4791]: I1208 21:43:55.995305 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.003333 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5bab6978-7963-4e17-aa8b-a814764f4393-operator-scripts\") pod \"nova-cell0-f316-account-create-update-pwkqh\" (UID: \"5bab6978-7963-4e17-aa8b-a814764f4393\") " pod="openstack/nova-cell0-f316-account-create-update-pwkqh" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.003392 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dq8qj\" (UniqueName: \"kubernetes.io/projected/5bab6978-7963-4e17-aa8b-a814764f4393-kube-api-access-dq8qj\") pod \"nova-cell0-f316-account-create-update-pwkqh\" (UID: \"5bab6978-7963-4e17-aa8b-a814764f4393\") " pod="openstack/nova-cell0-f316-account-create-update-pwkqh" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.003748 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-ncw6l" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.004136 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.004259 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-cl6px" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.004372 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.010994 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.021482 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-8be1-account-create-update-k6gx9" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.039900 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-8dcddd8f7-nk4tp"] Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.044774 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-8dcddd8f7-nk4tp" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.051938 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.069587 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.071285 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.085372 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-8dcddd8f7-nk4tp"] Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.113472 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5bab6978-7963-4e17-aa8b-a814764f4393-operator-scripts\") pod \"nova-cell0-f316-account-create-update-pwkqh\" (UID: \"5bab6978-7963-4e17-aa8b-a814764f4393\") " pod="openstack/nova-cell0-f316-account-create-update-pwkqh" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.113564 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/79f21b28-25e8-4260-a133-910ab353ed8c-openstack-config-secret\") pod \"openstackclient\" (UID: \"79f21b28-25e8-4260-a133-910ab353ed8c\") " pod="openstack/openstackclient" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.113644 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dq8qj\" (UniqueName: \"kubernetes.io/projected/5bab6978-7963-4e17-aa8b-a814764f4393-kube-api-access-dq8qj\") pod \"nova-cell0-f316-account-create-update-pwkqh\" (UID: \"5bab6978-7963-4e17-aa8b-a814764f4393\") " pod="openstack/nova-cell0-f316-account-create-update-pwkqh" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.113780 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79f21b28-25e8-4260-a133-910ab353ed8c-combined-ca-bundle\") pod \"openstackclient\" (UID: \"79f21b28-25e8-4260-a133-910ab353ed8c\") " pod="openstack/openstackclient" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.114140 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7twqc\" (UniqueName: \"kubernetes.io/projected/79f21b28-25e8-4260-a133-910ab353ed8c-kube-api-access-7twqc\") pod \"openstackclient\" (UID: \"79f21b28-25e8-4260-a133-910ab353ed8c\") " pod="openstack/openstackclient" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.114188 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/79f21b28-25e8-4260-a133-910ab353ed8c-openstack-config\") pod \"openstackclient\" (UID: \"79f21b28-25e8-4260-a133-910ab353ed8c\") " pod="openstack/openstackclient" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.115493 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5bab6978-7963-4e17-aa8b-a814764f4393-operator-scripts\") pod \"nova-cell0-f316-account-create-update-pwkqh\" (UID: \"5bab6978-7963-4e17-aa8b-a814764f4393\") " 
pod="openstack/nova-cell0-f316-account-create-update-pwkqh" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.131575 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-5219-account-create-update-jxhzk"] Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.155503 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-5219-account-create-update-jxhzk" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.158688 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.160512 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dq8qj\" (UniqueName: \"kubernetes.io/projected/5bab6978-7963-4e17-aa8b-a814764f4393-kube-api-access-dq8qj\") pod \"nova-cell0-f316-account-create-update-pwkqh\" (UID: \"5bab6978-7963-4e17-aa8b-a814764f4393\") " pod="openstack/nova-cell0-f316-account-create-update-pwkqh" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.181758 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-5219-account-create-update-jxhzk"] Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.221164 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a347204-ba19-40d2-8afa-48549be35c18-public-tls-certs\") pod \"swift-proxy-8dcddd8f7-nk4tp\" (UID: \"1a347204-ba19-40d2-8afa-48549be35c18\") " pod="openstack/swift-proxy-8dcddd8f7-nk4tp" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.221659 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7twqc\" (UniqueName: \"kubernetes.io/projected/79f21b28-25e8-4260-a133-910ab353ed8c-kube-api-access-7twqc\") pod \"openstackclient\" (UID: \"79f21b28-25e8-4260-a133-910ab353ed8c\") " pod="openstack/openstackclient" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.221691 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/79f21b28-25e8-4260-a133-910ab353ed8c-openstack-config\") pod \"openstackclient\" (UID: \"79f21b28-25e8-4260-a133-910ab353ed8c\") " pod="openstack/openstackclient" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.221964 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/79f21b28-25e8-4260-a133-910ab353ed8c-openstack-config-secret\") pod \"openstackclient\" (UID: \"79f21b28-25e8-4260-a133-910ab353ed8c\") " pod="openstack/openstackclient" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.222020 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a347204-ba19-40d2-8afa-48549be35c18-combined-ca-bundle\") pod \"swift-proxy-8dcddd8f7-nk4tp\" (UID: \"1a347204-ba19-40d2-8afa-48549be35c18\") " pod="openstack/swift-proxy-8dcddd8f7-nk4tp" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.222078 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79f21b28-25e8-4260-a133-910ab353ed8c-combined-ca-bundle\") pod \"openstackclient\" (UID: \"79f21b28-25e8-4260-a133-910ab353ed8c\") " pod="openstack/openstackclient" Dec 08 21:43:56 crc kubenswrapper[4791]: 
I1208 21:43:56.222106 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a347204-ba19-40d2-8afa-48549be35c18-config-data\") pod \"swift-proxy-8dcddd8f7-nk4tp\" (UID: \"1a347204-ba19-40d2-8afa-48549be35c18\") " pod="openstack/swift-proxy-8dcddd8f7-nk4tp" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.222122 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1a347204-ba19-40d2-8afa-48549be35c18-log-httpd\") pod \"swift-proxy-8dcddd8f7-nk4tp\" (UID: \"1a347204-ba19-40d2-8afa-48549be35c18\") " pod="openstack/swift-proxy-8dcddd8f7-nk4tp" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.222155 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/1a347204-ba19-40d2-8afa-48549be35c18-etc-swift\") pod \"swift-proxy-8dcddd8f7-nk4tp\" (UID: \"1a347204-ba19-40d2-8afa-48549be35c18\") " pod="openstack/swift-proxy-8dcddd8f7-nk4tp" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.222182 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1a347204-ba19-40d2-8afa-48549be35c18-run-httpd\") pod \"swift-proxy-8dcddd8f7-nk4tp\" (UID: \"1a347204-ba19-40d2-8afa-48549be35c18\") " pod="openstack/swift-proxy-8dcddd8f7-nk4tp" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.222594 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-phmlm\" (UniqueName: \"kubernetes.io/projected/1a347204-ba19-40d2-8afa-48549be35c18-kube-api-access-phmlm\") pod \"swift-proxy-8dcddd8f7-nk4tp\" (UID: \"1a347204-ba19-40d2-8afa-48549be35c18\") " pod="openstack/swift-proxy-8dcddd8f7-nk4tp" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.222626 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a347204-ba19-40d2-8afa-48549be35c18-internal-tls-certs\") pod \"swift-proxy-8dcddd8f7-nk4tp\" (UID: \"1a347204-ba19-40d2-8afa-48549be35c18\") " pod="openstack/swift-proxy-8dcddd8f7-nk4tp" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.223959 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/79f21b28-25e8-4260-a133-910ab353ed8c-openstack-config\") pod \"openstackclient\" (UID: \"79f21b28-25e8-4260-a133-910ab353ed8c\") " pod="openstack/openstackclient" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.236272 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/79f21b28-25e8-4260-a133-910ab353ed8c-openstack-config-secret\") pod \"openstackclient\" (UID: \"79f21b28-25e8-4260-a133-910ab353ed8c\") " pod="openstack/openstackclient" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.246751 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79f21b28-25e8-4260-a133-910ab353ed8c-combined-ca-bundle\") pod \"openstackclient\" (UID: \"79f21b28-25e8-4260-a133-910ab353ed8c\") " pod="openstack/openstackclient" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.270648 4791 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7twqc\" (UniqueName: \"kubernetes.io/projected/79f21b28-25e8-4260-a133-910ab353ed8c-kube-api-access-7twqc\") pod \"openstackclient\" (UID: \"79f21b28-25e8-4260-a133-910ab353ed8c\") " pod="openstack/openstackclient" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.324571 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a347204-ba19-40d2-8afa-48549be35c18-combined-ca-bundle\") pod \"swift-proxy-8dcddd8f7-nk4tp\" (UID: \"1a347204-ba19-40d2-8afa-48549be35c18\") " pod="openstack/swift-proxy-8dcddd8f7-nk4tp" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.324665 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a347204-ba19-40d2-8afa-48549be35c18-config-data\") pod \"swift-proxy-8dcddd8f7-nk4tp\" (UID: \"1a347204-ba19-40d2-8afa-48549be35c18\") " pod="openstack/swift-proxy-8dcddd8f7-nk4tp" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.324694 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1a347204-ba19-40d2-8afa-48549be35c18-log-httpd\") pod \"swift-proxy-8dcddd8f7-nk4tp\" (UID: \"1a347204-ba19-40d2-8afa-48549be35c18\") " pod="openstack/swift-proxy-8dcddd8f7-nk4tp" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.324741 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/1a347204-ba19-40d2-8afa-48549be35c18-etc-swift\") pod \"swift-proxy-8dcddd8f7-nk4tp\" (UID: \"1a347204-ba19-40d2-8afa-48549be35c18\") " pod="openstack/swift-proxy-8dcddd8f7-nk4tp" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.324773 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1a347204-ba19-40d2-8afa-48549be35c18-run-httpd\") pod \"swift-proxy-8dcddd8f7-nk4tp\" (UID: \"1a347204-ba19-40d2-8afa-48549be35c18\") " pod="openstack/swift-proxy-8dcddd8f7-nk4tp" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.324808 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/73358556-e1fc-4ffe-a4ce-5b0c131b5c10-operator-scripts\") pod \"nova-cell1-5219-account-create-update-jxhzk\" (UID: \"73358556-e1fc-4ffe-a4ce-5b0c131b5c10\") " pod="openstack/nova-cell1-5219-account-create-update-jxhzk" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.324889 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvdzx\" (UniqueName: \"kubernetes.io/projected/73358556-e1fc-4ffe-a4ce-5b0c131b5c10-kube-api-access-kvdzx\") pod \"nova-cell1-5219-account-create-update-jxhzk\" (UID: \"73358556-e1fc-4ffe-a4ce-5b0c131b5c10\") " pod="openstack/nova-cell1-5219-account-create-update-jxhzk" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.324937 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-phmlm\" (UniqueName: \"kubernetes.io/projected/1a347204-ba19-40d2-8afa-48549be35c18-kube-api-access-phmlm\") pod \"swift-proxy-8dcddd8f7-nk4tp\" (UID: \"1a347204-ba19-40d2-8afa-48549be35c18\") " pod="openstack/swift-proxy-8dcddd8f7-nk4tp" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 
21:43:56.324966 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a347204-ba19-40d2-8afa-48549be35c18-internal-tls-certs\") pod \"swift-proxy-8dcddd8f7-nk4tp\" (UID: \"1a347204-ba19-40d2-8afa-48549be35c18\") " pod="openstack/swift-proxy-8dcddd8f7-nk4tp" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.325025 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a347204-ba19-40d2-8afa-48549be35c18-public-tls-certs\") pod \"swift-proxy-8dcddd8f7-nk4tp\" (UID: \"1a347204-ba19-40d2-8afa-48549be35c18\") " pod="openstack/swift-proxy-8dcddd8f7-nk4tp" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.328030 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1a347204-ba19-40d2-8afa-48549be35c18-log-httpd\") pod \"swift-proxy-8dcddd8f7-nk4tp\" (UID: \"1a347204-ba19-40d2-8afa-48549be35c18\") " pod="openstack/swift-proxy-8dcddd8f7-nk4tp" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.329991 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1a347204-ba19-40d2-8afa-48549be35c18-run-httpd\") pod \"swift-proxy-8dcddd8f7-nk4tp\" (UID: \"1a347204-ba19-40d2-8afa-48549be35c18\") " pod="openstack/swift-proxy-8dcddd8f7-nk4tp" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.330839 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a347204-ba19-40d2-8afa-48549be35c18-config-data\") pod \"swift-proxy-8dcddd8f7-nk4tp\" (UID: \"1a347204-ba19-40d2-8afa-48549be35c18\") " pod="openstack/swift-proxy-8dcddd8f7-nk4tp" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.336354 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a347204-ba19-40d2-8afa-48549be35c18-internal-tls-certs\") pod \"swift-proxy-8dcddd8f7-nk4tp\" (UID: \"1a347204-ba19-40d2-8afa-48549be35c18\") " pod="openstack/swift-proxy-8dcddd8f7-nk4tp" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.337665 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a347204-ba19-40d2-8afa-48549be35c18-combined-ca-bundle\") pod \"swift-proxy-8dcddd8f7-nk4tp\" (UID: \"1a347204-ba19-40d2-8afa-48549be35c18\") " pod="openstack/swift-proxy-8dcddd8f7-nk4tp" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.339599 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/1a347204-ba19-40d2-8afa-48549be35c18-etc-swift\") pod \"swift-proxy-8dcddd8f7-nk4tp\" (UID: \"1a347204-ba19-40d2-8afa-48549be35c18\") " pod="openstack/swift-proxy-8dcddd8f7-nk4tp" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.343457 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a347204-ba19-40d2-8afa-48549be35c18-public-tls-certs\") pod \"swift-proxy-8dcddd8f7-nk4tp\" (UID: \"1a347204-ba19-40d2-8afa-48549be35c18\") " pod="openstack/swift-proxy-8dcddd8f7-nk4tp" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.367070 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-phmlm\" (UniqueName: 
\"kubernetes.io/projected/1a347204-ba19-40d2-8afa-48549be35c18-kube-api-access-phmlm\") pod \"swift-proxy-8dcddd8f7-nk4tp\" (UID: \"1a347204-ba19-40d2-8afa-48549be35c18\") " pod="openstack/swift-proxy-8dcddd8f7-nk4tp" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.409381 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-f316-account-create-update-pwkqh" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.428916 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/73358556-e1fc-4ffe-a4ce-5b0c131b5c10-operator-scripts\") pod \"nova-cell1-5219-account-create-update-jxhzk\" (UID: \"73358556-e1fc-4ffe-a4ce-5b0c131b5c10\") " pod="openstack/nova-cell1-5219-account-create-update-jxhzk" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.429007 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvdzx\" (UniqueName: \"kubernetes.io/projected/73358556-e1fc-4ffe-a4ce-5b0c131b5c10-kube-api-access-kvdzx\") pod \"nova-cell1-5219-account-create-update-jxhzk\" (UID: \"73358556-e1fc-4ffe-a4ce-5b0c131b5c10\") " pod="openstack/nova-cell1-5219-account-create-update-jxhzk" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.430499 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/73358556-e1fc-4ffe-a4ce-5b0c131b5c10-operator-scripts\") pod \"nova-cell1-5219-account-create-update-jxhzk\" (UID: \"73358556-e1fc-4ffe-a4ce-5b0c131b5c10\") " pod="openstack/nova-cell1-5219-account-create-update-jxhzk" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.453341 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvdzx\" (UniqueName: \"kubernetes.io/projected/73358556-e1fc-4ffe-a4ce-5b0c131b5c10-kube-api-access-kvdzx\") pod \"nova-cell1-5219-account-create-update-jxhzk\" (UID: \"73358556-e1fc-4ffe-a4ce-5b0c131b5c10\") " pod="openstack/nova-cell1-5219-account-create-update-jxhzk" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.504313 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.535919 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-8dcddd8f7-nk4tp" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.560378 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-5219-account-create-update-jxhzk" Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.687596 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-868dn"] Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.852059 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-ncw6l"] Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.864007 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-7j742"] Dec 08 21:43:56 crc kubenswrapper[4791]: I1208 21:43:56.993570 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-868dn" event={"ID":"cc095445-ea14-4648-9198-f86b355ec210","Type":"ContainerStarted","Data":"df51093b262150b22657dda7fd97e0189778573c6c387ac3412bcb8ff13e410f"} Dec 08 21:43:57 crc kubenswrapper[4791]: I1208 21:43:57.000457 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-ncw6l" event={"ID":"32971ca2-45d0-455e-9bd3-3452c7d044e0","Type":"ContainerStarted","Data":"ca79cd89c14af9fe77e6f3ac6eaa2a5e73bee40803d6c115c213453de76dd7fa"} Dec 08 21:43:57 crc kubenswrapper[4791]: I1208 21:43:57.002070 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-7j742" event={"ID":"5ffcf886-dc11-49ff-9ab5-ee93d739852e","Type":"ContainerStarted","Data":"1826d4e8faae3e82df488511ee20881afb4b56268d0d43c75762e8e674d3c870"} Dec 08 21:43:57 crc kubenswrapper[4791]: I1208 21:43:57.010061 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-8be1-account-create-update-k6gx9"] Dec 08 21:43:57 crc kubenswrapper[4791]: W1208 21:43:57.018574 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9a9c9a16_208d_4c67_bdb5_8300013965ff.slice/crio-929dbf6c2133b7456a8679b62bbf724d4ea194f2c7b8d265a6855367eae20a2e WatchSource:0}: Error finding container 929dbf6c2133b7456a8679b62bbf724d4ea194f2c7b8d265a6855367eae20a2e: Status 404 returned error can't find the container with id 929dbf6c2133b7456a8679b62bbf724d4ea194f2c7b8d265a6855367eae20a2e Dec 08 21:43:57 crc kubenswrapper[4791]: I1208 21:43:57.257301 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-f316-account-create-update-pwkqh"] Dec 08 21:43:57 crc kubenswrapper[4791]: I1208 21:43:57.406112 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-5219-account-create-update-jxhzk"] Dec 08 21:43:57 crc kubenswrapper[4791]: I1208 21:43:57.417489 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 08 21:43:57 crc kubenswrapper[4791]: I1208 21:43:57.691360 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-8dcddd8f7-nk4tp"] Dec 08 21:43:58 crc kubenswrapper[4791]: I1208 21:43:58.021605 4791 generic.go:334] "Generic (PLEG): container finished" podID="cc095445-ea14-4648-9198-f86b355ec210" containerID="e9e1e087c6ccf387445b0f54b9b52098ea90eb510a2ae283a68d6c9d320b4361" exitCode=0 Dec 08 21:43:58 crc kubenswrapper[4791]: I1208 21:43:58.021877 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-868dn" event={"ID":"cc095445-ea14-4648-9198-f86b355ec210","Type":"ContainerDied","Data":"e9e1e087c6ccf387445b0f54b9b52098ea90eb510a2ae283a68d6c9d320b4361"} Dec 08 21:43:58 crc kubenswrapper[4791]: I1208 21:43:58.024772 4791 generic.go:334] "Generic 
(PLEG): container finished" podID="32971ca2-45d0-455e-9bd3-3452c7d044e0" containerID="30b8b2ba637299d4be16caffee728b676448aa3e76ba0c34ecbef84b14405d8f" exitCode=0 Dec 08 21:43:58 crc kubenswrapper[4791]: I1208 21:43:58.024906 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-ncw6l" event={"ID":"32971ca2-45d0-455e-9bd3-3452c7d044e0","Type":"ContainerDied","Data":"30b8b2ba637299d4be16caffee728b676448aa3e76ba0c34ecbef84b14405d8f"} Dec 08 21:43:58 crc kubenswrapper[4791]: I1208 21:43:58.029198 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-5219-account-create-update-jxhzk" event={"ID":"73358556-e1fc-4ffe-a4ce-5b0c131b5c10","Type":"ContainerStarted","Data":"c7bbaca276f1cafafae192e9d1283f40fb4b946dbbd9429897ee65bda7dcdce5"} Dec 08 21:43:58 crc kubenswrapper[4791]: I1208 21:43:58.031327 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"79f21b28-25e8-4260-a133-910ab353ed8c","Type":"ContainerStarted","Data":"281b29554d1f22b8cda6e8d40a01ed9283bd70be092458b6167436039b7051cd"} Dec 08 21:43:58 crc kubenswrapper[4791]: I1208 21:43:58.035760 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-8dcddd8f7-nk4tp" event={"ID":"1a347204-ba19-40d2-8afa-48549be35c18","Type":"ContainerStarted","Data":"db2a7b4692337537bdb8ee5026c38f2771fb51bb30eedf565a0689f5da6279df"} Dec 08 21:43:58 crc kubenswrapper[4791]: I1208 21:43:58.038381 4791 generic.go:334] "Generic (PLEG): container finished" podID="5bab6978-7963-4e17-aa8b-a814764f4393" containerID="1ce86c557a404fcbe7a734119c146d96bbaaf5b5a63dcf1c5fbaab4242179618" exitCode=0 Dec 08 21:43:58 crc kubenswrapper[4791]: I1208 21:43:58.038444 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-f316-account-create-update-pwkqh" event={"ID":"5bab6978-7963-4e17-aa8b-a814764f4393","Type":"ContainerDied","Data":"1ce86c557a404fcbe7a734119c146d96bbaaf5b5a63dcf1c5fbaab4242179618"} Dec 08 21:43:58 crc kubenswrapper[4791]: I1208 21:43:58.038473 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-f316-account-create-update-pwkqh" event={"ID":"5bab6978-7963-4e17-aa8b-a814764f4393","Type":"ContainerStarted","Data":"fb8db3005fd9d63adb6766dd2d1859281f301329bea314b3e76a860c5606e78b"} Dec 08 21:43:58 crc kubenswrapper[4791]: I1208 21:43:58.041410 4791 generic.go:334] "Generic (PLEG): container finished" podID="5ffcf886-dc11-49ff-9ab5-ee93d739852e" containerID="3dd8e973eb5c92752716d38c73665cdbcf4339027926db29ac71b2ce5e6c3bb4" exitCode=0 Dec 08 21:43:58 crc kubenswrapper[4791]: I1208 21:43:58.041633 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-7j742" event={"ID":"5ffcf886-dc11-49ff-9ab5-ee93d739852e","Type":"ContainerDied","Data":"3dd8e973eb5c92752716d38c73665cdbcf4339027926db29ac71b2ce5e6c3bb4"} Dec 08 21:43:58 crc kubenswrapper[4791]: I1208 21:43:58.044408 4791 generic.go:334] "Generic (PLEG): container finished" podID="9a9c9a16-208d-4c67-bdb5-8300013965ff" containerID="b139cd64f0e7ab7c3c66542847ce7e165093eee97f2b23bbde953f003e7f459e" exitCode=0 Dec 08 21:43:58 crc kubenswrapper[4791]: I1208 21:43:58.044619 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-8be1-account-create-update-k6gx9" event={"ID":"9a9c9a16-208d-4c67-bdb5-8300013965ff","Type":"ContainerDied","Data":"b139cd64f0e7ab7c3c66542847ce7e165093eee97f2b23bbde953f003e7f459e"} Dec 08 21:43:58 crc kubenswrapper[4791]: I1208 21:43:58.044740 4791 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-8be1-account-create-update-k6gx9" event={"ID":"9a9c9a16-208d-4c67-bdb5-8300013965ff","Type":"ContainerStarted","Data":"929dbf6c2133b7456a8679b62bbf724d4ea194f2c7b8d265a6855367eae20a2e"} Dec 08 21:43:58 crc kubenswrapper[4791]: I1208 21:43:58.487625 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 08 21:43:59 crc kubenswrapper[4791]: I1208 21:43:59.057796 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-8dcddd8f7-nk4tp" event={"ID":"1a347204-ba19-40d2-8afa-48549be35c18","Type":"ContainerStarted","Data":"823c863cf53ea145fd914bb26e727e4ebb85dd3f397b47fffa408e1df7f6ff78"} Dec 08 21:43:59 crc kubenswrapper[4791]: I1208 21:43:59.059340 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-8dcddd8f7-nk4tp" event={"ID":"1a347204-ba19-40d2-8afa-48549be35c18","Type":"ContainerStarted","Data":"17e344ab293deef6c12b44e52d2b31985e3684f36d3ee2179c67d28cd5006976"} Dec 08 21:43:59 crc kubenswrapper[4791]: I1208 21:43:59.059672 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-8dcddd8f7-nk4tp" Dec 08 21:43:59 crc kubenswrapper[4791]: I1208 21:43:59.060001 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-8dcddd8f7-nk4tp" Dec 08 21:43:59 crc kubenswrapper[4791]: I1208 21:43:59.060259 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-5219-account-create-update-jxhzk" event={"ID":"73358556-e1fc-4ffe-a4ce-5b0c131b5c10","Type":"ContainerDied","Data":"cbda81db0160321f783877c28abbc1bd4331fd4fa74bf5a71ca6fbe3f4332249"} Dec 08 21:43:59 crc kubenswrapper[4791]: I1208 21:43:59.060096 4791 generic.go:334] "Generic (PLEG): container finished" podID="73358556-e1fc-4ffe-a4ce-5b0c131b5c10" containerID="cbda81db0160321f783877c28abbc1bd4331fd4fa74bf5a71ca6fbe3f4332249" exitCode=0 Dec 08 21:43:59 crc kubenswrapper[4791]: I1208 21:43:59.081638 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-8dcddd8f7-nk4tp" podStartSLOduration=4.081609845 podStartE2EDuration="4.081609845s" podCreationTimestamp="2025-12-08 21:43:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:43:59.07369485 +0000 UTC m=+1515.772453225" watchObservedRunningTime="2025-12-08 21:43:59.081609845 +0000 UTC m=+1515.780368190" Dec 08 21:43:59 crc kubenswrapper[4791]: I1208 21:43:59.579319 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-7j742" Dec 08 21:43:59 crc kubenswrapper[4791]: I1208 21:43:59.721968 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q6fp5\" (UniqueName: \"kubernetes.io/projected/5ffcf886-dc11-49ff-9ab5-ee93d739852e-kube-api-access-q6fp5\") pod \"5ffcf886-dc11-49ff-9ab5-ee93d739852e\" (UID: \"5ffcf886-dc11-49ff-9ab5-ee93d739852e\") " Dec 08 21:43:59 crc kubenswrapper[4791]: I1208 21:43:59.722019 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5ffcf886-dc11-49ff-9ab5-ee93d739852e-operator-scripts\") pod \"5ffcf886-dc11-49ff-9ab5-ee93d739852e\" (UID: \"5ffcf886-dc11-49ff-9ab5-ee93d739852e\") " Dec 08 21:43:59 crc kubenswrapper[4791]: I1208 21:43:59.724624 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5ffcf886-dc11-49ff-9ab5-ee93d739852e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5ffcf886-dc11-49ff-9ab5-ee93d739852e" (UID: "5ffcf886-dc11-49ff-9ab5-ee93d739852e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:43:59 crc kubenswrapper[4791]: I1208 21:43:59.734282 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ffcf886-dc11-49ff-9ab5-ee93d739852e-kube-api-access-q6fp5" (OuterVolumeSpecName: "kube-api-access-q6fp5") pod "5ffcf886-dc11-49ff-9ab5-ee93d739852e" (UID: "5ffcf886-dc11-49ff-9ab5-ee93d739852e"). InnerVolumeSpecName "kube-api-access-q6fp5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:43:59 crc kubenswrapper[4791]: I1208 21:43:59.826043 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q6fp5\" (UniqueName: \"kubernetes.io/projected/5ffcf886-dc11-49ff-9ab5-ee93d739852e-kube-api-access-q6fp5\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:59 crc kubenswrapper[4791]: I1208 21:43:59.826338 4791 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5ffcf886-dc11-49ff-9ab5-ee93d739852e-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:00 crc kubenswrapper[4791]: I1208 21:44:00.078049 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-868dn" event={"ID":"cc095445-ea14-4648-9198-f86b355ec210","Type":"ContainerDied","Data":"df51093b262150b22657dda7fd97e0189778573c6c387ac3412bcb8ff13e410f"} Dec 08 21:44:00 crc kubenswrapper[4791]: I1208 21:44:00.078344 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="df51093b262150b22657dda7fd97e0189778573c6c387ac3412bcb8ff13e410f" Dec 08 21:44:00 crc kubenswrapper[4791]: I1208 21:44:00.081726 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-ncw6l" event={"ID":"32971ca2-45d0-455e-9bd3-3452c7d044e0","Type":"ContainerDied","Data":"ca79cd89c14af9fe77e6f3ac6eaa2a5e73bee40803d6c115c213453de76dd7fa"} Dec 08 21:44:00 crc kubenswrapper[4791]: I1208 21:44:00.081769 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ca79cd89c14af9fe77e6f3ac6eaa2a5e73bee40803d6c115c213453de76dd7fa" Dec 08 21:44:00 crc kubenswrapper[4791]: I1208 21:44:00.088187 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-f316-account-create-update-pwkqh" 
event={"ID":"5bab6978-7963-4e17-aa8b-a814764f4393","Type":"ContainerDied","Data":"fb8db3005fd9d63adb6766dd2d1859281f301329bea314b3e76a860c5606e78b"} Dec 08 21:44:00 crc kubenswrapper[4791]: I1208 21:44:00.088238 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fb8db3005fd9d63adb6766dd2d1859281f301329bea314b3e76a860c5606e78b" Dec 08 21:44:00 crc kubenswrapper[4791]: I1208 21:44:00.089931 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-7j742" event={"ID":"5ffcf886-dc11-49ff-9ab5-ee93d739852e","Type":"ContainerDied","Data":"1826d4e8faae3e82df488511ee20881afb4b56268d0d43c75762e8e674d3c870"} Dec 08 21:44:00 crc kubenswrapper[4791]: I1208 21:44:00.089959 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1826d4e8faae3e82df488511ee20881afb4b56268d0d43c75762e8e674d3c870" Dec 08 21:44:00 crc kubenswrapper[4791]: I1208 21:44:00.090038 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-7j742" Dec 08 21:44:00 crc kubenswrapper[4791]: I1208 21:44:00.108427 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-8be1-account-create-update-k6gx9" event={"ID":"9a9c9a16-208d-4c67-bdb5-8300013965ff","Type":"ContainerDied","Data":"929dbf6c2133b7456a8679b62bbf724d4ea194f2c7b8d265a6855367eae20a2e"} Dec 08 21:44:00 crc kubenswrapper[4791]: I1208 21:44:00.108469 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="929dbf6c2133b7456a8679b62bbf724d4ea194f2c7b8d265a6855367eae20a2e" Dec 08 21:44:00 crc kubenswrapper[4791]: I1208 21:44:00.187371 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-f316-account-create-update-pwkqh" Dec 08 21:44:00 crc kubenswrapper[4791]: I1208 21:44:00.219177 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-868dn" Dec 08 21:44:00 crc kubenswrapper[4791]: I1208 21:44:00.245185 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-ncw6l" Dec 08 21:44:00 crc kubenswrapper[4791]: I1208 21:44:00.261264 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-8be1-account-create-update-k6gx9" Dec 08 21:44:00 crc kubenswrapper[4791]: I1208 21:44:00.341121 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cc095445-ea14-4648-9198-f86b355ec210-operator-scripts\") pod \"cc095445-ea14-4648-9198-f86b355ec210\" (UID: \"cc095445-ea14-4648-9198-f86b355ec210\") " Dec 08 21:44:00 crc kubenswrapper[4791]: I1208 21:44:00.341188 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9a9c9a16-208d-4c67-bdb5-8300013965ff-operator-scripts\") pod \"9a9c9a16-208d-4c67-bdb5-8300013965ff\" (UID: \"9a9c9a16-208d-4c67-bdb5-8300013965ff\") " Dec 08 21:44:00 crc kubenswrapper[4791]: I1208 21:44:00.341742 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cc095445-ea14-4648-9198-f86b355ec210-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "cc095445-ea14-4648-9198-f86b355ec210" (UID: "cc095445-ea14-4648-9198-f86b355ec210"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:44:00 crc kubenswrapper[4791]: I1208 21:44:00.341230 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dq8qj\" (UniqueName: \"kubernetes.io/projected/5bab6978-7963-4e17-aa8b-a814764f4393-kube-api-access-dq8qj\") pod \"5bab6978-7963-4e17-aa8b-a814764f4393\" (UID: \"5bab6978-7963-4e17-aa8b-a814764f4393\") " Dec 08 21:44:00 crc kubenswrapper[4791]: I1208 21:44:00.341957 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5bab6978-7963-4e17-aa8b-a814764f4393-operator-scripts\") pod \"5bab6978-7963-4e17-aa8b-a814764f4393\" (UID: \"5bab6978-7963-4e17-aa8b-a814764f4393\") " Dec 08 21:44:00 crc kubenswrapper[4791]: I1208 21:44:00.342076 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xmvqf\" (UniqueName: \"kubernetes.io/projected/32971ca2-45d0-455e-9bd3-3452c7d044e0-kube-api-access-xmvqf\") pod \"32971ca2-45d0-455e-9bd3-3452c7d044e0\" (UID: \"32971ca2-45d0-455e-9bd3-3452c7d044e0\") " Dec 08 21:44:00 crc kubenswrapper[4791]: I1208 21:44:00.342252 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/32971ca2-45d0-455e-9bd3-3452c7d044e0-operator-scripts\") pod \"32971ca2-45d0-455e-9bd3-3452c7d044e0\" (UID: \"32971ca2-45d0-455e-9bd3-3452c7d044e0\") " Dec 08 21:44:00 crc kubenswrapper[4791]: I1208 21:44:00.342307 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9a9c9a16-208d-4c67-bdb5-8300013965ff-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9a9c9a16-208d-4c67-bdb5-8300013965ff" (UID: "9a9c9a16-208d-4c67-bdb5-8300013965ff"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:44:00 crc kubenswrapper[4791]: I1208 21:44:00.342351 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-222ms\" (UniqueName: \"kubernetes.io/projected/cc095445-ea14-4648-9198-f86b355ec210-kube-api-access-222ms\") pod \"cc095445-ea14-4648-9198-f86b355ec210\" (UID: \"cc095445-ea14-4648-9198-f86b355ec210\") " Dec 08 21:44:00 crc kubenswrapper[4791]: I1208 21:44:00.342489 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tz42n\" (UniqueName: \"kubernetes.io/projected/9a9c9a16-208d-4c67-bdb5-8300013965ff-kube-api-access-tz42n\") pod \"9a9c9a16-208d-4c67-bdb5-8300013965ff\" (UID: \"9a9c9a16-208d-4c67-bdb5-8300013965ff\") " Dec 08 21:44:00 crc kubenswrapper[4791]: I1208 21:44:00.342734 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5bab6978-7963-4e17-aa8b-a814764f4393-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5bab6978-7963-4e17-aa8b-a814764f4393" (UID: "5bab6978-7963-4e17-aa8b-a814764f4393"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:44:00 crc kubenswrapper[4791]: I1208 21:44:00.343324 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/32971ca2-45d0-455e-9bd3-3452c7d044e0-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "32971ca2-45d0-455e-9bd3-3452c7d044e0" (UID: "32971ca2-45d0-455e-9bd3-3452c7d044e0"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:44:00 crc kubenswrapper[4791]: I1208 21:44:00.343659 4791 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9a9c9a16-208d-4c67-bdb5-8300013965ff-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:00 crc kubenswrapper[4791]: I1208 21:44:00.343679 4791 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5bab6978-7963-4e17-aa8b-a814764f4393-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:00 crc kubenswrapper[4791]: I1208 21:44:00.343688 4791 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/32971ca2-45d0-455e-9bd3-3452c7d044e0-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:00 crc kubenswrapper[4791]: I1208 21:44:00.343698 4791 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cc095445-ea14-4648-9198-f86b355ec210-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:00 crc kubenswrapper[4791]: I1208 21:44:00.353174 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5bab6978-7963-4e17-aa8b-a814764f4393-kube-api-access-dq8qj" (OuterVolumeSpecName: "kube-api-access-dq8qj") pod "5bab6978-7963-4e17-aa8b-a814764f4393" (UID: "5bab6978-7963-4e17-aa8b-a814764f4393"). InnerVolumeSpecName "kube-api-access-dq8qj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:44:00 crc kubenswrapper[4791]: I1208 21:44:00.358687 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32971ca2-45d0-455e-9bd3-3452c7d044e0-kube-api-access-xmvqf" (OuterVolumeSpecName: "kube-api-access-xmvqf") pod "32971ca2-45d0-455e-9bd3-3452c7d044e0" (UID: "32971ca2-45d0-455e-9bd3-3452c7d044e0"). InnerVolumeSpecName "kube-api-access-xmvqf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:44:00 crc kubenswrapper[4791]: I1208 21:44:00.364057 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a9c9a16-208d-4c67-bdb5-8300013965ff-kube-api-access-tz42n" (OuterVolumeSpecName: "kube-api-access-tz42n") pod "9a9c9a16-208d-4c67-bdb5-8300013965ff" (UID: "9a9c9a16-208d-4c67-bdb5-8300013965ff"). InnerVolumeSpecName "kube-api-access-tz42n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:44:00 crc kubenswrapper[4791]: I1208 21:44:00.365306 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc095445-ea14-4648-9198-f86b355ec210-kube-api-access-222ms" (OuterVolumeSpecName: "kube-api-access-222ms") pod "cc095445-ea14-4648-9198-f86b355ec210" (UID: "cc095445-ea14-4648-9198-f86b355ec210"). InnerVolumeSpecName "kube-api-access-222ms". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:44:00 crc kubenswrapper[4791]: I1208 21:44:00.445884 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tz42n\" (UniqueName: \"kubernetes.io/projected/9a9c9a16-208d-4c67-bdb5-8300013965ff-kube-api-access-tz42n\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:00 crc kubenswrapper[4791]: I1208 21:44:00.445921 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dq8qj\" (UniqueName: \"kubernetes.io/projected/5bab6978-7963-4e17-aa8b-a814764f4393-kube-api-access-dq8qj\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:00 crc kubenswrapper[4791]: I1208 21:44:00.445934 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xmvqf\" (UniqueName: \"kubernetes.io/projected/32971ca2-45d0-455e-9bd3-3452c7d044e0-kube-api-access-xmvqf\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:00 crc kubenswrapper[4791]: I1208 21:44:00.445947 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-222ms\" (UniqueName: \"kubernetes.io/projected/cc095445-ea14-4648-9198-f86b355ec210-kube-api-access-222ms\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:00 crc kubenswrapper[4791]: I1208 21:44:00.663229 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-5219-account-create-update-jxhzk" Dec 08 21:44:00 crc kubenswrapper[4791]: I1208 21:44:00.753547 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/73358556-e1fc-4ffe-a4ce-5b0c131b5c10-operator-scripts\") pod \"73358556-e1fc-4ffe-a4ce-5b0c131b5c10\" (UID: \"73358556-e1fc-4ffe-a4ce-5b0c131b5c10\") " Dec 08 21:44:00 crc kubenswrapper[4791]: I1208 21:44:00.753833 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kvdzx\" (UniqueName: \"kubernetes.io/projected/73358556-e1fc-4ffe-a4ce-5b0c131b5c10-kube-api-access-kvdzx\") pod \"73358556-e1fc-4ffe-a4ce-5b0c131b5c10\" (UID: \"73358556-e1fc-4ffe-a4ce-5b0c131b5c10\") " Dec 08 21:44:00 crc kubenswrapper[4791]: I1208 21:44:00.754577 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/73358556-e1fc-4ffe-a4ce-5b0c131b5c10-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "73358556-e1fc-4ffe-a4ce-5b0c131b5c10" (UID: "73358556-e1fc-4ffe-a4ce-5b0c131b5c10"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:44:00 crc kubenswrapper[4791]: I1208 21:44:00.761970 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/73358556-e1fc-4ffe-a4ce-5b0c131b5c10-kube-api-access-kvdzx" (OuterVolumeSpecName: "kube-api-access-kvdzx") pod "73358556-e1fc-4ffe-a4ce-5b0c131b5c10" (UID: "73358556-e1fc-4ffe-a4ce-5b0c131b5c10"). InnerVolumeSpecName "kube-api-access-kvdzx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:44:00 crc kubenswrapper[4791]: I1208 21:44:00.867890 4791 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/73358556-e1fc-4ffe-a4ce-5b0c131b5c10-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:00 crc kubenswrapper[4791]: I1208 21:44:00.870359 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kvdzx\" (UniqueName: \"kubernetes.io/projected/73358556-e1fc-4ffe-a4ce-5b0c131b5c10-kube-api-access-kvdzx\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:01 crc kubenswrapper[4791]: I1208 21:44:01.123127 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-5219-account-create-update-jxhzk" Dec 08 21:44:01 crc kubenswrapper[4791]: I1208 21:44:01.123170 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-ncw6l" Dec 08 21:44:01 crc kubenswrapper[4791]: I1208 21:44:01.123178 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-868dn" Dec 08 21:44:01 crc kubenswrapper[4791]: I1208 21:44:01.123212 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-8be1-account-create-update-k6gx9" Dec 08 21:44:01 crc kubenswrapper[4791]: I1208 21:44:01.123185 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-5219-account-create-update-jxhzk" event={"ID":"73358556-e1fc-4ffe-a4ce-5b0c131b5c10","Type":"ContainerDied","Data":"c7bbaca276f1cafafae192e9d1283f40fb4b946dbbd9429897ee65bda7dcdce5"} Dec 08 21:44:01 crc kubenswrapper[4791]: I1208 21:44:01.131184 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c7bbaca276f1cafafae192e9d1283f40fb4b946dbbd9429897ee65bda7dcdce5" Dec 08 21:44:01 crc kubenswrapper[4791]: I1208 21:44:01.123224 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-f316-account-create-update-pwkqh" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.024281 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-engine-6fdc69876f-c67rs"] Dec 08 21:44:03 crc kubenswrapper[4791]: E1208 21:44:03.030322 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32971ca2-45d0-455e-9bd3-3452c7d044e0" containerName="mariadb-database-create" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.030352 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="32971ca2-45d0-455e-9bd3-3452c7d044e0" containerName="mariadb-database-create" Dec 08 21:44:03 crc kubenswrapper[4791]: E1208 21:44:03.030378 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73358556-e1fc-4ffe-a4ce-5b0c131b5c10" containerName="mariadb-account-create-update" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.030384 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="73358556-e1fc-4ffe-a4ce-5b0c131b5c10" containerName="mariadb-account-create-update" Dec 08 21:44:03 crc kubenswrapper[4791]: E1208 21:44:03.030402 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ffcf886-dc11-49ff-9ab5-ee93d739852e" containerName="mariadb-database-create" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.030408 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ffcf886-dc11-49ff-9ab5-ee93d739852e" containerName="mariadb-database-create" Dec 08 21:44:03 crc kubenswrapper[4791]: E1208 21:44:03.030422 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a9c9a16-208d-4c67-bdb5-8300013965ff" containerName="mariadb-account-create-update" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.030428 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a9c9a16-208d-4c67-bdb5-8300013965ff" containerName="mariadb-account-create-update" Dec 08 21:44:03 crc kubenswrapper[4791]: E1208 21:44:03.030450 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5bab6978-7963-4e17-aa8b-a814764f4393" containerName="mariadb-account-create-update" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.030455 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="5bab6978-7963-4e17-aa8b-a814764f4393" containerName="mariadb-account-create-update" Dec 08 21:44:03 crc kubenswrapper[4791]: E1208 21:44:03.030474 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc095445-ea14-4648-9198-f86b355ec210" containerName="mariadb-database-create" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.030480 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc095445-ea14-4648-9198-f86b355ec210" containerName="mariadb-database-create" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.030670 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a9c9a16-208d-4c67-bdb5-8300013965ff" containerName="mariadb-account-create-update" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.030683 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc095445-ea14-4648-9198-f86b355ec210" containerName="mariadb-database-create" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.030693 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="32971ca2-45d0-455e-9bd3-3452c7d044e0" containerName="mariadb-database-create" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.030722 4791 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="5bab6978-7963-4e17-aa8b-a814764f4393" containerName="mariadb-account-create-update" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.030733 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="73358556-e1fc-4ffe-a4ce-5b0c131b5c10" containerName="mariadb-account-create-update" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.030751 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ffcf886-dc11-49ff-9ab5-ee93d739852e" containerName="mariadb-database-create" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.031757 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-engine-6fdc69876f-c67rs" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.038031 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-config-data" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.038376 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-heat-dockercfg-hbhtr" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.039612 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-engine-config-data" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.060667 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-6fdc69876f-c67rs"] Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.127521 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30ff68d8-19dd-41a7-b38d-9f27571b27bc-combined-ca-bundle\") pod \"heat-engine-6fdc69876f-c67rs\" (UID: \"30ff68d8-19dd-41a7-b38d-9f27571b27bc\") " pod="openstack/heat-engine-6fdc69876f-c67rs" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.127643 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-66wmc\" (UniqueName: \"kubernetes.io/projected/30ff68d8-19dd-41a7-b38d-9f27571b27bc-kube-api-access-66wmc\") pod \"heat-engine-6fdc69876f-c67rs\" (UID: \"30ff68d8-19dd-41a7-b38d-9f27571b27bc\") " pod="openstack/heat-engine-6fdc69876f-c67rs" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.127671 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30ff68d8-19dd-41a7-b38d-9f27571b27bc-config-data\") pod \"heat-engine-6fdc69876f-c67rs\" (UID: \"30ff68d8-19dd-41a7-b38d-9f27571b27bc\") " pod="openstack/heat-engine-6fdc69876f-c67rs" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.127855 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/30ff68d8-19dd-41a7-b38d-9f27571b27bc-config-data-custom\") pod \"heat-engine-6fdc69876f-c67rs\" (UID: \"30ff68d8-19dd-41a7-b38d-9f27571b27bc\") " pod="openstack/heat-engine-6fdc69876f-c67rs" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.213441 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-688b9f5b49-7hsq9"] Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.223955 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-688b9f5b49-7hsq9" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.243444 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30ff68d8-19dd-41a7-b38d-9f27571b27bc-combined-ca-bundle\") pod \"heat-engine-6fdc69876f-c67rs\" (UID: \"30ff68d8-19dd-41a7-b38d-9f27571b27bc\") " pod="openstack/heat-engine-6fdc69876f-c67rs" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.244267 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-66wmc\" (UniqueName: \"kubernetes.io/projected/30ff68d8-19dd-41a7-b38d-9f27571b27bc-kube-api-access-66wmc\") pod \"heat-engine-6fdc69876f-c67rs\" (UID: \"30ff68d8-19dd-41a7-b38d-9f27571b27bc\") " pod="openstack/heat-engine-6fdc69876f-c67rs" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.244374 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30ff68d8-19dd-41a7-b38d-9f27571b27bc-config-data\") pod \"heat-engine-6fdc69876f-c67rs\" (UID: \"30ff68d8-19dd-41a7-b38d-9f27571b27bc\") " pod="openstack/heat-engine-6fdc69876f-c67rs" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.244804 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/30ff68d8-19dd-41a7-b38d-9f27571b27bc-config-data-custom\") pod \"heat-engine-6fdc69876f-c67rs\" (UID: \"30ff68d8-19dd-41a7-b38d-9f27571b27bc\") " pod="openstack/heat-engine-6fdc69876f-c67rs" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.277500 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30ff68d8-19dd-41a7-b38d-9f27571b27bc-config-data\") pod \"heat-engine-6fdc69876f-c67rs\" (UID: \"30ff68d8-19dd-41a7-b38d-9f27571b27bc\") " pod="openstack/heat-engine-6fdc69876f-c67rs" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.298616 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-66wmc\" (UniqueName: \"kubernetes.io/projected/30ff68d8-19dd-41a7-b38d-9f27571b27bc-kube-api-access-66wmc\") pod \"heat-engine-6fdc69876f-c67rs\" (UID: \"30ff68d8-19dd-41a7-b38d-9f27571b27bc\") " pod="openstack/heat-engine-6fdc69876f-c67rs" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.301474 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30ff68d8-19dd-41a7-b38d-9f27571b27bc-combined-ca-bundle\") pod \"heat-engine-6fdc69876f-c67rs\" (UID: \"30ff68d8-19dd-41a7-b38d-9f27571b27bc\") " pod="openstack/heat-engine-6fdc69876f-c67rs" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.314405 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-688b9f5b49-7hsq9"] Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.319268 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/30ff68d8-19dd-41a7-b38d-9f27571b27bc-config-data-custom\") pod \"heat-engine-6fdc69876f-c67rs\" (UID: \"30ff68d8-19dd-41a7-b38d-9f27571b27bc\") " pod="openstack/heat-engine-6fdc69876f-c67rs" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.349795 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/72a6c619-8a0d-4a9f-b68f-f316cf96202d-dns-svc\") pod \"dnsmasq-dns-688b9f5b49-7hsq9\" (UID: \"72a6c619-8a0d-4a9f-b68f-f316cf96202d\") " pod="openstack/dnsmasq-dns-688b9f5b49-7hsq9" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.350963 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/72a6c619-8a0d-4a9f-b68f-f316cf96202d-ovsdbserver-sb\") pod \"dnsmasq-dns-688b9f5b49-7hsq9\" (UID: \"72a6c619-8a0d-4a9f-b68f-f316cf96202d\") " pod="openstack/dnsmasq-dns-688b9f5b49-7hsq9" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.351556 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/72a6c619-8a0d-4a9f-b68f-f316cf96202d-dns-swift-storage-0\") pod \"dnsmasq-dns-688b9f5b49-7hsq9\" (UID: \"72a6c619-8a0d-4a9f-b68f-f316cf96202d\") " pod="openstack/dnsmasq-dns-688b9f5b49-7hsq9" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.352190 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/72a6c619-8a0d-4a9f-b68f-f316cf96202d-ovsdbserver-nb\") pod \"dnsmasq-dns-688b9f5b49-7hsq9\" (UID: \"72a6c619-8a0d-4a9f-b68f-f316cf96202d\") " pod="openstack/dnsmasq-dns-688b9f5b49-7hsq9" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.352588 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72a6c619-8a0d-4a9f-b68f-f316cf96202d-config\") pod \"dnsmasq-dns-688b9f5b49-7hsq9\" (UID: \"72a6c619-8a0d-4a9f-b68f-f316cf96202d\") " pod="openstack/dnsmasq-dns-688b9f5b49-7hsq9" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.353535 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmtwd\" (UniqueName: \"kubernetes.io/projected/72a6c619-8a0d-4a9f-b68f-f316cf96202d-kube-api-access-dmtwd\") pod \"dnsmasq-dns-688b9f5b49-7hsq9\" (UID: \"72a6c619-8a0d-4a9f-b68f-f316cf96202d\") " pod="openstack/dnsmasq-dns-688b9f5b49-7hsq9" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.360172 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-cfnapi-7c6cccb66c-d6kng"] Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.364968 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-7c6cccb66c-d6kng" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.368651 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-cfnapi-config-data" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.375534 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-engine-6fdc69876f-c67rs" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.432383 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-7c6cccb66c-d6kng"] Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.470983 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72a6c619-8a0d-4a9f-b68f-f316cf96202d-config\") pod \"dnsmasq-dns-688b9f5b49-7hsq9\" (UID: \"72a6c619-8a0d-4a9f-b68f-f316cf96202d\") " pod="openstack/dnsmasq-dns-688b9f5b49-7hsq9" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.471047 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmtwd\" (UniqueName: \"kubernetes.io/projected/72a6c619-8a0d-4a9f-b68f-f316cf96202d-kube-api-access-dmtwd\") pod \"dnsmasq-dns-688b9f5b49-7hsq9\" (UID: \"72a6c619-8a0d-4a9f-b68f-f316cf96202d\") " pod="openstack/dnsmasq-dns-688b9f5b49-7hsq9" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.471133 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-929lj\" (UniqueName: \"kubernetes.io/projected/b4ae5100-9cc7-462c-8424-d8837d636fa3-kube-api-access-929lj\") pod \"heat-cfnapi-7c6cccb66c-d6kng\" (UID: \"b4ae5100-9cc7-462c-8424-d8837d636fa3\") " pod="openstack/heat-cfnapi-7c6cccb66c-d6kng" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.471235 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/72a6c619-8a0d-4a9f-b68f-f316cf96202d-dns-svc\") pod \"dnsmasq-dns-688b9f5b49-7hsq9\" (UID: \"72a6c619-8a0d-4a9f-b68f-f316cf96202d\") " pod="openstack/dnsmasq-dns-688b9f5b49-7hsq9" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.471280 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4ae5100-9cc7-462c-8424-d8837d636fa3-combined-ca-bundle\") pod \"heat-cfnapi-7c6cccb66c-d6kng\" (UID: \"b4ae5100-9cc7-462c-8424-d8837d636fa3\") " pod="openstack/heat-cfnapi-7c6cccb66c-d6kng" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.471322 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/72a6c619-8a0d-4a9f-b68f-f316cf96202d-ovsdbserver-sb\") pod \"dnsmasq-dns-688b9f5b49-7hsq9\" (UID: \"72a6c619-8a0d-4a9f-b68f-f316cf96202d\") " pod="openstack/dnsmasq-dns-688b9f5b49-7hsq9" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.471358 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4ae5100-9cc7-462c-8424-d8837d636fa3-config-data\") pod \"heat-cfnapi-7c6cccb66c-d6kng\" (UID: \"b4ae5100-9cc7-462c-8424-d8837d636fa3\") " pod="openstack/heat-cfnapi-7c6cccb66c-d6kng" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.471389 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b4ae5100-9cc7-462c-8424-d8837d636fa3-config-data-custom\") pod \"heat-cfnapi-7c6cccb66c-d6kng\" (UID: \"b4ae5100-9cc7-462c-8424-d8837d636fa3\") " pod="openstack/heat-cfnapi-7c6cccb66c-d6kng" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.471425 4791 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/72a6c619-8a0d-4a9f-b68f-f316cf96202d-dns-swift-storage-0\") pod \"dnsmasq-dns-688b9f5b49-7hsq9\" (UID: \"72a6c619-8a0d-4a9f-b68f-f316cf96202d\") " pod="openstack/dnsmasq-dns-688b9f5b49-7hsq9" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.471482 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/72a6c619-8a0d-4a9f-b68f-f316cf96202d-ovsdbserver-nb\") pod \"dnsmasq-dns-688b9f5b49-7hsq9\" (UID: \"72a6c619-8a0d-4a9f-b68f-f316cf96202d\") " pod="openstack/dnsmasq-dns-688b9f5b49-7hsq9" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.472403 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72a6c619-8a0d-4a9f-b68f-f316cf96202d-config\") pod \"dnsmasq-dns-688b9f5b49-7hsq9\" (UID: \"72a6c619-8a0d-4a9f-b68f-f316cf96202d\") " pod="openstack/dnsmasq-dns-688b9f5b49-7hsq9" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.472477 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/72a6c619-8a0d-4a9f-b68f-f316cf96202d-ovsdbserver-nb\") pod \"dnsmasq-dns-688b9f5b49-7hsq9\" (UID: \"72a6c619-8a0d-4a9f-b68f-f316cf96202d\") " pod="openstack/dnsmasq-dns-688b9f5b49-7hsq9" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.473183 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/72a6c619-8a0d-4a9f-b68f-f316cf96202d-dns-svc\") pod \"dnsmasq-dns-688b9f5b49-7hsq9\" (UID: \"72a6c619-8a0d-4a9f-b68f-f316cf96202d\") " pod="openstack/dnsmasq-dns-688b9f5b49-7hsq9" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.473281 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/72a6c619-8a0d-4a9f-b68f-f316cf96202d-ovsdbserver-sb\") pod \"dnsmasq-dns-688b9f5b49-7hsq9\" (UID: \"72a6c619-8a0d-4a9f-b68f-f316cf96202d\") " pod="openstack/dnsmasq-dns-688b9f5b49-7hsq9" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.473914 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/72a6c619-8a0d-4a9f-b68f-f316cf96202d-dns-swift-storage-0\") pod \"dnsmasq-dns-688b9f5b49-7hsq9\" (UID: \"72a6c619-8a0d-4a9f-b68f-f316cf96202d\") " pod="openstack/dnsmasq-dns-688b9f5b49-7hsq9" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.496069 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-api-84b7f8ffb-qdgth"] Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.502837 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-api-84b7f8ffb-qdgth" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.499287 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmtwd\" (UniqueName: \"kubernetes.io/projected/72a6c619-8a0d-4a9f-b68f-f316cf96202d-kube-api-access-dmtwd\") pod \"dnsmasq-dns-688b9f5b49-7hsq9\" (UID: \"72a6c619-8a0d-4a9f-b68f-f316cf96202d\") " pod="openstack/dnsmasq-dns-688b9f5b49-7hsq9" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.509013 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-api-config-data" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.527787 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-84b7f8ffb-qdgth"] Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.576141 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/51be1224-1eae-451d-9bac-6ab628fded83-config-data-custom\") pod \"heat-api-84b7f8ffb-qdgth\" (UID: \"51be1224-1eae-451d-9bac-6ab628fded83\") " pod="openstack/heat-api-84b7f8ffb-qdgth" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.576189 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-929lj\" (UniqueName: \"kubernetes.io/projected/b4ae5100-9cc7-462c-8424-d8837d636fa3-kube-api-access-929lj\") pod \"heat-cfnapi-7c6cccb66c-d6kng\" (UID: \"b4ae5100-9cc7-462c-8424-d8837d636fa3\") " pod="openstack/heat-cfnapi-7c6cccb66c-d6kng" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.576256 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51be1224-1eae-451d-9bac-6ab628fded83-combined-ca-bundle\") pod \"heat-api-84b7f8ffb-qdgth\" (UID: \"51be1224-1eae-451d-9bac-6ab628fded83\") " pod="openstack/heat-api-84b7f8ffb-qdgth" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.576285 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bdt6k\" (UniqueName: \"kubernetes.io/projected/51be1224-1eae-451d-9bac-6ab628fded83-kube-api-access-bdt6k\") pod \"heat-api-84b7f8ffb-qdgth\" (UID: \"51be1224-1eae-451d-9bac-6ab628fded83\") " pod="openstack/heat-api-84b7f8ffb-qdgth" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.576340 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4ae5100-9cc7-462c-8424-d8837d636fa3-combined-ca-bundle\") pod \"heat-cfnapi-7c6cccb66c-d6kng\" (UID: \"b4ae5100-9cc7-462c-8424-d8837d636fa3\") " pod="openstack/heat-cfnapi-7c6cccb66c-d6kng" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.576387 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4ae5100-9cc7-462c-8424-d8837d636fa3-config-data\") pod \"heat-cfnapi-7c6cccb66c-d6kng\" (UID: \"b4ae5100-9cc7-462c-8424-d8837d636fa3\") " pod="openstack/heat-cfnapi-7c6cccb66c-d6kng" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.576417 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b4ae5100-9cc7-462c-8424-d8837d636fa3-config-data-custom\") pod \"heat-cfnapi-7c6cccb66c-d6kng\" (UID: \"b4ae5100-9cc7-462c-8424-d8837d636fa3\") " 
pod="openstack/heat-cfnapi-7c6cccb66c-d6kng" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.576441 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51be1224-1eae-451d-9bac-6ab628fded83-config-data\") pod \"heat-api-84b7f8ffb-qdgth\" (UID: \"51be1224-1eae-451d-9bac-6ab628fded83\") " pod="openstack/heat-api-84b7f8ffb-qdgth" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.584882 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b4ae5100-9cc7-462c-8424-d8837d636fa3-config-data-custom\") pod \"heat-cfnapi-7c6cccb66c-d6kng\" (UID: \"b4ae5100-9cc7-462c-8424-d8837d636fa3\") " pod="openstack/heat-cfnapi-7c6cccb66c-d6kng" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.592069 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4ae5100-9cc7-462c-8424-d8837d636fa3-combined-ca-bundle\") pod \"heat-cfnapi-7c6cccb66c-d6kng\" (UID: \"b4ae5100-9cc7-462c-8424-d8837d636fa3\") " pod="openstack/heat-cfnapi-7c6cccb66c-d6kng" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.596584 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4ae5100-9cc7-462c-8424-d8837d636fa3-config-data\") pod \"heat-cfnapi-7c6cccb66c-d6kng\" (UID: \"b4ae5100-9cc7-462c-8424-d8837d636fa3\") " pod="openstack/heat-cfnapi-7c6cccb66c-d6kng" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.597482 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-929lj\" (UniqueName: \"kubernetes.io/projected/b4ae5100-9cc7-462c-8424-d8837d636fa3-kube-api-access-929lj\") pod \"heat-cfnapi-7c6cccb66c-d6kng\" (UID: \"b4ae5100-9cc7-462c-8424-d8837d636fa3\") " pod="openstack/heat-cfnapi-7c6cccb66c-d6kng" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.678572 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/51be1224-1eae-451d-9bac-6ab628fded83-config-data-custom\") pod \"heat-api-84b7f8ffb-qdgth\" (UID: \"51be1224-1eae-451d-9bac-6ab628fded83\") " pod="openstack/heat-api-84b7f8ffb-qdgth" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.678667 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51be1224-1eae-451d-9bac-6ab628fded83-combined-ca-bundle\") pod \"heat-api-84b7f8ffb-qdgth\" (UID: \"51be1224-1eae-451d-9bac-6ab628fded83\") " pod="openstack/heat-api-84b7f8ffb-qdgth" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.678701 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bdt6k\" (UniqueName: \"kubernetes.io/projected/51be1224-1eae-451d-9bac-6ab628fded83-kube-api-access-bdt6k\") pod \"heat-api-84b7f8ffb-qdgth\" (UID: \"51be1224-1eae-451d-9bac-6ab628fded83\") " pod="openstack/heat-api-84b7f8ffb-qdgth" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.678825 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51be1224-1eae-451d-9bac-6ab628fded83-config-data\") pod \"heat-api-84b7f8ffb-qdgth\" (UID: \"51be1224-1eae-451d-9bac-6ab628fded83\") " pod="openstack/heat-api-84b7f8ffb-qdgth" Dec 08 21:44:03 crc kubenswrapper[4791]: 
I1208 21:44:03.687960 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51be1224-1eae-451d-9bac-6ab628fded83-config-data\") pod \"heat-api-84b7f8ffb-qdgth\" (UID: \"51be1224-1eae-451d-9bac-6ab628fded83\") " pod="openstack/heat-api-84b7f8ffb-qdgth" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.689221 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/51be1224-1eae-451d-9bac-6ab628fded83-config-data-custom\") pod \"heat-api-84b7f8ffb-qdgth\" (UID: \"51be1224-1eae-451d-9bac-6ab628fded83\") " pod="openstack/heat-api-84b7f8ffb-qdgth" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.693792 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51be1224-1eae-451d-9bac-6ab628fded83-combined-ca-bundle\") pod \"heat-api-84b7f8ffb-qdgth\" (UID: \"51be1224-1eae-451d-9bac-6ab628fded83\") " pod="openstack/heat-api-84b7f8ffb-qdgth" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.707074 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-688b9f5b49-7hsq9" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.726476 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bdt6k\" (UniqueName: \"kubernetes.io/projected/51be1224-1eae-451d-9bac-6ab628fded83-kube-api-access-bdt6k\") pod \"heat-api-84b7f8ffb-qdgth\" (UID: \"51be1224-1eae-451d-9bac-6ab628fded83\") " pod="openstack/heat-api-84b7f8ffb-qdgth" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.728087 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-7c6cccb66c-d6kng" Dec 08 21:44:03 crc kubenswrapper[4791]: I1208 21:44:03.922089 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-api-84b7f8ffb-qdgth" Dec 08 21:44:04 crc kubenswrapper[4791]: I1208 21:44:04.125518 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-6fdc69876f-c67rs"] Dec 08 21:44:04 crc kubenswrapper[4791]: W1208 21:44:04.146150 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod30ff68d8_19dd_41a7_b38d_9f27571b27bc.slice/crio-8c64135f1d19785b5f063675c2ed46d32a8f287ecb27a4cd67c414be2cd57724 WatchSource:0}: Error finding container 8c64135f1d19785b5f063675c2ed46d32a8f287ecb27a4cd67c414be2cd57724: Status 404 returned error can't find the container with id 8c64135f1d19785b5f063675c2ed46d32a8f287ecb27a4cd67c414be2cd57724 Dec 08 21:44:04 crc kubenswrapper[4791]: I1208 21:44:04.225929 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-6fdc69876f-c67rs" event={"ID":"30ff68d8-19dd-41a7-b38d-9f27571b27bc","Type":"ContainerStarted","Data":"8c64135f1d19785b5f063675c2ed46d32a8f287ecb27a4cd67c414be2cd57724"} Dec 08 21:44:04 crc kubenswrapper[4791]: I1208 21:44:04.432917 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-688b9f5b49-7hsq9"] Dec 08 21:44:04 crc kubenswrapper[4791]: I1208 21:44:04.559073 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-7c6cccb66c-d6kng"] Dec 08 21:44:04 crc kubenswrapper[4791]: I1208 21:44:04.738689 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-84b7f8ffb-qdgth"] Dec 08 21:44:05 crc kubenswrapper[4791]: I1208 21:44:05.238017 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-84b7f8ffb-qdgth" event={"ID":"51be1224-1eae-451d-9bac-6ab628fded83","Type":"ContainerStarted","Data":"e725f2954f34894ba3390aafc25120b07b2e20c3d5bd9da826758e696ad59d52"} Dec 08 21:44:05 crc kubenswrapper[4791]: I1208 21:44:05.240644 4791 generic.go:334] "Generic (PLEG): container finished" podID="72a6c619-8a0d-4a9f-b68f-f316cf96202d" containerID="f688d6fd5d52a10c3fa3d42360c6fb4a9919e1607e1b1d29c6224f5e3a693732" exitCode=0 Dec 08 21:44:05 crc kubenswrapper[4791]: I1208 21:44:05.240757 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688b9f5b49-7hsq9" event={"ID":"72a6c619-8a0d-4a9f-b68f-f316cf96202d","Type":"ContainerDied","Data":"f688d6fd5d52a10c3fa3d42360c6fb4a9919e1607e1b1d29c6224f5e3a693732"} Dec 08 21:44:05 crc kubenswrapper[4791]: I1208 21:44:05.240822 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688b9f5b49-7hsq9" event={"ID":"72a6c619-8a0d-4a9f-b68f-f316cf96202d","Type":"ContainerStarted","Data":"f8fb3651cb8f13310d7cb88f6a275c7e28fd7741339f02165847142dc72c8fb2"} Dec 08 21:44:05 crc kubenswrapper[4791]: I1208 21:44:05.248621 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-6fdc69876f-c67rs" event={"ID":"30ff68d8-19dd-41a7-b38d-9f27571b27bc","Type":"ContainerStarted","Data":"45be5921a4a0e581b14dc5be8e3c3c955824db4bc5aff6990bd2e34e3810e37d"} Dec 08 21:44:05 crc kubenswrapper[4791]: I1208 21:44:05.248929 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-engine-6fdc69876f-c67rs" Dec 08 21:44:05 crc kubenswrapper[4791]: I1208 21:44:05.252674 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-7c6cccb66c-d6kng" 
event={"ID":"b4ae5100-9cc7-462c-8424-d8837d636fa3","Type":"ContainerStarted","Data":"2d3b244054dcdbafffea80b1502f283602a5b10dbb17ad9a2e38da4a702334da"} Dec 08 21:44:05 crc kubenswrapper[4791]: I1208 21:44:05.299935 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-engine-6fdc69876f-c67rs" podStartSLOduration=3.2999116920000002 podStartE2EDuration="3.299911692s" podCreationTimestamp="2025-12-08 21:44:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:44:05.283540179 +0000 UTC m=+1521.982298514" watchObservedRunningTime="2025-12-08 21:44:05.299911692 +0000 UTC m=+1521.998670037" Dec 08 21:44:06 crc kubenswrapper[4791]: I1208 21:44:06.132047 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-bm9lh"] Dec 08 21:44:06 crc kubenswrapper[4791]: I1208 21:44:06.134051 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-bm9lh" Dec 08 21:44:06 crc kubenswrapper[4791]: I1208 21:44:06.139326 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Dec 08 21:44:06 crc kubenswrapper[4791]: I1208 21:44:06.139782 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-55twv" Dec 08 21:44:06 crc kubenswrapper[4791]: I1208 21:44:06.141457 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 08 21:44:06 crc kubenswrapper[4791]: I1208 21:44:06.172615 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-bm9lh"] Dec 08 21:44:06 crc kubenswrapper[4791]: I1208 21:44:06.267661 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e02fffe8-6208-48f0-ba89-6d54f07f5ae4-config-data\") pod \"nova-cell0-conductor-db-sync-bm9lh\" (UID: \"e02fffe8-6208-48f0-ba89-6d54f07f5ae4\") " pod="openstack/nova-cell0-conductor-db-sync-bm9lh" Dec 08 21:44:06 crc kubenswrapper[4791]: I1208 21:44:06.267783 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4fzjz\" (UniqueName: \"kubernetes.io/projected/e02fffe8-6208-48f0-ba89-6d54f07f5ae4-kube-api-access-4fzjz\") pod \"nova-cell0-conductor-db-sync-bm9lh\" (UID: \"e02fffe8-6208-48f0-ba89-6d54f07f5ae4\") " pod="openstack/nova-cell0-conductor-db-sync-bm9lh" Dec 08 21:44:06 crc kubenswrapper[4791]: I1208 21:44:06.267842 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e02fffe8-6208-48f0-ba89-6d54f07f5ae4-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-bm9lh\" (UID: \"e02fffe8-6208-48f0-ba89-6d54f07f5ae4\") " pod="openstack/nova-cell0-conductor-db-sync-bm9lh" Dec 08 21:44:06 crc kubenswrapper[4791]: I1208 21:44:06.267940 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e02fffe8-6208-48f0-ba89-6d54f07f5ae4-scripts\") pod \"nova-cell0-conductor-db-sync-bm9lh\" (UID: \"e02fffe8-6208-48f0-ba89-6d54f07f5ae4\") " pod="openstack/nova-cell0-conductor-db-sync-bm9lh" Dec 08 21:44:06 crc kubenswrapper[4791]: I1208 21:44:06.283210 4791 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/dnsmasq-dns-688b9f5b49-7hsq9" event={"ID":"72a6c619-8a0d-4a9f-b68f-f316cf96202d","Type":"ContainerStarted","Data":"37ff669fa652230a76a974e01feb43540427b1a2e6e0edc36285a532a02e2a55"} Dec 08 21:44:06 crc kubenswrapper[4791]: I1208 21:44:06.283323 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-688b9f5b49-7hsq9" Dec 08 21:44:06 crc kubenswrapper[4791]: I1208 21:44:06.310805 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-688b9f5b49-7hsq9" podStartSLOduration=3.310780635 podStartE2EDuration="3.310780635s" podCreationTimestamp="2025-12-08 21:44:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:44:06.303223489 +0000 UTC m=+1523.001981844" watchObservedRunningTime="2025-12-08 21:44:06.310780635 +0000 UTC m=+1523.009538980" Dec 08 21:44:06 crc kubenswrapper[4791]: I1208 21:44:06.370855 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e02fffe8-6208-48f0-ba89-6d54f07f5ae4-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-bm9lh\" (UID: \"e02fffe8-6208-48f0-ba89-6d54f07f5ae4\") " pod="openstack/nova-cell0-conductor-db-sync-bm9lh" Dec 08 21:44:06 crc kubenswrapper[4791]: I1208 21:44:06.371120 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e02fffe8-6208-48f0-ba89-6d54f07f5ae4-scripts\") pod \"nova-cell0-conductor-db-sync-bm9lh\" (UID: \"e02fffe8-6208-48f0-ba89-6d54f07f5ae4\") " pod="openstack/nova-cell0-conductor-db-sync-bm9lh" Dec 08 21:44:06 crc kubenswrapper[4791]: I1208 21:44:06.371258 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e02fffe8-6208-48f0-ba89-6d54f07f5ae4-config-data\") pod \"nova-cell0-conductor-db-sync-bm9lh\" (UID: \"e02fffe8-6208-48f0-ba89-6d54f07f5ae4\") " pod="openstack/nova-cell0-conductor-db-sync-bm9lh" Dec 08 21:44:06 crc kubenswrapper[4791]: I1208 21:44:06.371392 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4fzjz\" (UniqueName: \"kubernetes.io/projected/e02fffe8-6208-48f0-ba89-6d54f07f5ae4-kube-api-access-4fzjz\") pod \"nova-cell0-conductor-db-sync-bm9lh\" (UID: \"e02fffe8-6208-48f0-ba89-6d54f07f5ae4\") " pod="openstack/nova-cell0-conductor-db-sync-bm9lh" Dec 08 21:44:06 crc kubenswrapper[4791]: I1208 21:44:06.378203 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e02fffe8-6208-48f0-ba89-6d54f07f5ae4-config-data\") pod \"nova-cell0-conductor-db-sync-bm9lh\" (UID: \"e02fffe8-6208-48f0-ba89-6d54f07f5ae4\") " pod="openstack/nova-cell0-conductor-db-sync-bm9lh" Dec 08 21:44:06 crc kubenswrapper[4791]: I1208 21:44:06.410431 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e02fffe8-6208-48f0-ba89-6d54f07f5ae4-scripts\") pod \"nova-cell0-conductor-db-sync-bm9lh\" (UID: \"e02fffe8-6208-48f0-ba89-6d54f07f5ae4\") " pod="openstack/nova-cell0-conductor-db-sync-bm9lh" Dec 08 21:44:06 crc kubenswrapper[4791]: I1208 21:44:06.410433 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/e02fffe8-6208-48f0-ba89-6d54f07f5ae4-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-bm9lh\" (UID: \"e02fffe8-6208-48f0-ba89-6d54f07f5ae4\") " pod="openstack/nova-cell0-conductor-db-sync-bm9lh" Dec 08 21:44:06 crc kubenswrapper[4791]: I1208 21:44:06.411004 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4fzjz\" (UniqueName: \"kubernetes.io/projected/e02fffe8-6208-48f0-ba89-6d54f07f5ae4-kube-api-access-4fzjz\") pod \"nova-cell0-conductor-db-sync-bm9lh\" (UID: \"e02fffe8-6208-48f0-ba89-6d54f07f5ae4\") " pod="openstack/nova-cell0-conductor-db-sync-bm9lh" Dec 08 21:44:06 crc kubenswrapper[4791]: I1208 21:44:06.465666 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-bm9lh" Dec 08 21:44:06 crc kubenswrapper[4791]: I1208 21:44:06.542967 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-8dcddd8f7-nk4tp" Dec 08 21:44:06 crc kubenswrapper[4791]: I1208 21:44:06.546664 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-8dcddd8f7-nk4tp" Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.397708 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-api-947d869cc-4cx89"] Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.399873 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-947d869cc-4cx89" Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.417932 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-engine-d44cc5586-cqs7v"] Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.419664 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-engine-d44cc5586-cqs7v" Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.444784 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-947d869cc-4cx89"] Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.458930 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-cfnapi-759ddfcf78-9k9nf"] Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.461077 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-cfnapi-759ddfcf78-9k9nf" Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.469403 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45nrn\" (UniqueName: \"kubernetes.io/projected/c5036ceb-802c-446d-ac98-a56f732e25d9-kube-api-access-45nrn\") pod \"heat-engine-d44cc5586-cqs7v\" (UID: \"c5036ceb-802c-446d-ac98-a56f732e25d9\") " pod="openstack/heat-engine-d44cc5586-cqs7v" Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.469606 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7ba96aae-1964-4b6a-b1dd-204997df9230-config-data-custom\") pod \"heat-api-947d869cc-4cx89\" (UID: \"7ba96aae-1964-4b6a-b1dd-204997df9230\") " pod="openstack/heat-api-947d869cc-4cx89" Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.469634 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ba96aae-1964-4b6a-b1dd-204997df9230-combined-ca-bundle\") pod \"heat-api-947d869cc-4cx89\" (UID: \"7ba96aae-1964-4b6a-b1dd-204997df9230\") " pod="openstack/heat-api-947d869cc-4cx89" Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.469670 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5036ceb-802c-446d-ac98-a56f732e25d9-combined-ca-bundle\") pod \"heat-engine-d44cc5586-cqs7v\" (UID: \"c5036ceb-802c-446d-ac98-a56f732e25d9\") " pod="openstack/heat-engine-d44cc5586-cqs7v" Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.469702 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c5036ceb-802c-446d-ac98-a56f732e25d9-config-data-custom\") pod \"heat-engine-d44cc5586-cqs7v\" (UID: \"c5036ceb-802c-446d-ac98-a56f732e25d9\") " pod="openstack/heat-engine-d44cc5586-cqs7v" Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.469754 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5036ceb-802c-446d-ac98-a56f732e25d9-config-data\") pod \"heat-engine-d44cc5586-cqs7v\" (UID: \"c5036ceb-802c-446d-ac98-a56f732e25d9\") " pod="openstack/heat-engine-d44cc5586-cqs7v" Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.469782 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ba96aae-1964-4b6a-b1dd-204997df9230-config-data\") pod \"heat-api-947d869cc-4cx89\" (UID: \"7ba96aae-1964-4b6a-b1dd-204997df9230\") " pod="openstack/heat-api-947d869cc-4cx89" Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.469821 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gz97t\" (UniqueName: \"kubernetes.io/projected/7ba96aae-1964-4b6a-b1dd-204997df9230-kube-api-access-gz97t\") pod \"heat-api-947d869cc-4cx89\" (UID: \"7ba96aae-1964-4b6a-b1dd-204997df9230\") " pod="openstack/heat-api-947d869cc-4cx89" Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.469962 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-d44cc5586-cqs7v"] Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.495493 4791 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-759ddfcf78-9k9nf"] Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.573409 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7ba96aae-1964-4b6a-b1dd-204997df9230-config-data-custom\") pod \"heat-api-947d869cc-4cx89\" (UID: \"7ba96aae-1964-4b6a-b1dd-204997df9230\") " pod="openstack/heat-api-947d869cc-4cx89" Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.573468 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ba96aae-1964-4b6a-b1dd-204997df9230-combined-ca-bundle\") pod \"heat-api-947d869cc-4cx89\" (UID: \"7ba96aae-1964-4b6a-b1dd-204997df9230\") " pod="openstack/heat-api-947d869cc-4cx89" Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.573550 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5036ceb-802c-446d-ac98-a56f732e25d9-combined-ca-bundle\") pod \"heat-engine-d44cc5586-cqs7v\" (UID: \"c5036ceb-802c-446d-ac98-a56f732e25d9\") " pod="openstack/heat-engine-d44cc5586-cqs7v" Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.573616 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c5036ceb-802c-446d-ac98-a56f732e25d9-config-data-custom\") pod \"heat-engine-d44cc5586-cqs7v\" (UID: \"c5036ceb-802c-446d-ac98-a56f732e25d9\") " pod="openstack/heat-engine-d44cc5586-cqs7v" Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.573665 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5036ceb-802c-446d-ac98-a56f732e25d9-config-data\") pod \"heat-engine-d44cc5586-cqs7v\" (UID: \"c5036ceb-802c-446d-ac98-a56f732e25d9\") " pod="openstack/heat-engine-d44cc5586-cqs7v" Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.573699 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ba96aae-1964-4b6a-b1dd-204997df9230-config-data\") pod \"heat-api-947d869cc-4cx89\" (UID: \"7ba96aae-1964-4b6a-b1dd-204997df9230\") " pod="openstack/heat-api-947d869cc-4cx89" Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.573759 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gz97t\" (UniqueName: \"kubernetes.io/projected/7ba96aae-1964-4b6a-b1dd-204997df9230-kube-api-access-gz97t\") pod \"heat-api-947d869cc-4cx89\" (UID: \"7ba96aae-1964-4b6a-b1dd-204997df9230\") " pod="openstack/heat-api-947d869cc-4cx89" Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.573851 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-45nrn\" (UniqueName: \"kubernetes.io/projected/c5036ceb-802c-446d-ac98-a56f732e25d9-kube-api-access-45nrn\") pod \"heat-engine-d44cc5586-cqs7v\" (UID: \"c5036ceb-802c-446d-ac98-a56f732e25d9\") " pod="openstack/heat-engine-d44cc5586-cqs7v" Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.573922 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/997b752d-4e35-4780-a978-35cda6d832fa-config-data-custom\") pod \"heat-cfnapi-759ddfcf78-9k9nf\" (UID: 
\"997b752d-4e35-4780-a978-35cda6d832fa\") " pod="openstack/heat-cfnapi-759ddfcf78-9k9nf" Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.574022 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6v5qw\" (UniqueName: \"kubernetes.io/projected/997b752d-4e35-4780-a978-35cda6d832fa-kube-api-access-6v5qw\") pod \"heat-cfnapi-759ddfcf78-9k9nf\" (UID: \"997b752d-4e35-4780-a978-35cda6d832fa\") " pod="openstack/heat-cfnapi-759ddfcf78-9k9nf" Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.574072 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/997b752d-4e35-4780-a978-35cda6d832fa-config-data\") pod \"heat-cfnapi-759ddfcf78-9k9nf\" (UID: \"997b752d-4e35-4780-a978-35cda6d832fa\") " pod="openstack/heat-cfnapi-759ddfcf78-9k9nf" Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.574112 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/997b752d-4e35-4780-a978-35cda6d832fa-combined-ca-bundle\") pod \"heat-cfnapi-759ddfcf78-9k9nf\" (UID: \"997b752d-4e35-4780-a978-35cda6d832fa\") " pod="openstack/heat-cfnapi-759ddfcf78-9k9nf" Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.585884 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5036ceb-802c-446d-ac98-a56f732e25d9-combined-ca-bundle\") pod \"heat-engine-d44cc5586-cqs7v\" (UID: \"c5036ceb-802c-446d-ac98-a56f732e25d9\") " pod="openstack/heat-engine-d44cc5586-cqs7v" Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.585912 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ba96aae-1964-4b6a-b1dd-204997df9230-combined-ca-bundle\") pod \"heat-api-947d869cc-4cx89\" (UID: \"7ba96aae-1964-4b6a-b1dd-204997df9230\") " pod="openstack/heat-api-947d869cc-4cx89" Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.589225 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7ba96aae-1964-4b6a-b1dd-204997df9230-config-data-custom\") pod \"heat-api-947d869cc-4cx89\" (UID: \"7ba96aae-1964-4b6a-b1dd-204997df9230\") " pod="openstack/heat-api-947d869cc-4cx89" Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.599782 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c5036ceb-802c-446d-ac98-a56f732e25d9-config-data-custom\") pod \"heat-engine-d44cc5586-cqs7v\" (UID: \"c5036ceb-802c-446d-ac98-a56f732e25d9\") " pod="openstack/heat-engine-d44cc5586-cqs7v" Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.601915 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ba96aae-1964-4b6a-b1dd-204997df9230-config-data\") pod \"heat-api-947d869cc-4cx89\" (UID: \"7ba96aae-1964-4b6a-b1dd-204997df9230\") " pod="openstack/heat-api-947d869cc-4cx89" Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.611152 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5036ceb-802c-446d-ac98-a56f732e25d9-config-data\") pod \"heat-engine-d44cc5586-cqs7v\" (UID: \"c5036ceb-802c-446d-ac98-a56f732e25d9\") " 
pod="openstack/heat-engine-d44cc5586-cqs7v" Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.630792 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gz97t\" (UniqueName: \"kubernetes.io/projected/7ba96aae-1964-4b6a-b1dd-204997df9230-kube-api-access-gz97t\") pod \"heat-api-947d869cc-4cx89\" (UID: \"7ba96aae-1964-4b6a-b1dd-204997df9230\") " pod="openstack/heat-api-947d869cc-4cx89" Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.639825 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45nrn\" (UniqueName: \"kubernetes.io/projected/c5036ceb-802c-446d-ac98-a56f732e25d9-kube-api-access-45nrn\") pod \"heat-engine-d44cc5586-cqs7v\" (UID: \"c5036ceb-802c-446d-ac98-a56f732e25d9\") " pod="openstack/heat-engine-d44cc5586-cqs7v" Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.679504 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/997b752d-4e35-4780-a978-35cda6d832fa-config-data-custom\") pod \"heat-cfnapi-759ddfcf78-9k9nf\" (UID: \"997b752d-4e35-4780-a978-35cda6d832fa\") " pod="openstack/heat-cfnapi-759ddfcf78-9k9nf" Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.679602 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6v5qw\" (UniqueName: \"kubernetes.io/projected/997b752d-4e35-4780-a978-35cda6d832fa-kube-api-access-6v5qw\") pod \"heat-cfnapi-759ddfcf78-9k9nf\" (UID: \"997b752d-4e35-4780-a978-35cda6d832fa\") " pod="openstack/heat-cfnapi-759ddfcf78-9k9nf" Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.679646 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/997b752d-4e35-4780-a978-35cda6d832fa-config-data\") pod \"heat-cfnapi-759ddfcf78-9k9nf\" (UID: \"997b752d-4e35-4780-a978-35cda6d832fa\") " pod="openstack/heat-cfnapi-759ddfcf78-9k9nf" Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.679672 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/997b752d-4e35-4780-a978-35cda6d832fa-combined-ca-bundle\") pod \"heat-cfnapi-759ddfcf78-9k9nf\" (UID: \"997b752d-4e35-4780-a978-35cda6d832fa\") " pod="openstack/heat-cfnapi-759ddfcf78-9k9nf" Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.683472 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/997b752d-4e35-4780-a978-35cda6d832fa-combined-ca-bundle\") pod \"heat-cfnapi-759ddfcf78-9k9nf\" (UID: \"997b752d-4e35-4780-a978-35cda6d832fa\") " pod="openstack/heat-cfnapi-759ddfcf78-9k9nf" Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.686939 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/997b752d-4e35-4780-a978-35cda6d832fa-config-data-custom\") pod \"heat-cfnapi-759ddfcf78-9k9nf\" (UID: \"997b752d-4e35-4780-a978-35cda6d832fa\") " pod="openstack/heat-cfnapi-759ddfcf78-9k9nf" Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.698064 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/997b752d-4e35-4780-a978-35cda6d832fa-config-data\") pod \"heat-cfnapi-759ddfcf78-9k9nf\" (UID: \"997b752d-4e35-4780-a978-35cda6d832fa\") " pod="openstack/heat-cfnapi-759ddfcf78-9k9nf" Dec 08 21:44:10 
crc kubenswrapper[4791]: I1208 21:44:10.702874 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6v5qw\" (UniqueName: \"kubernetes.io/projected/997b752d-4e35-4780-a978-35cda6d832fa-kube-api-access-6v5qw\") pod \"heat-cfnapi-759ddfcf78-9k9nf\" (UID: \"997b752d-4e35-4780-a978-35cda6d832fa\") " pod="openstack/heat-cfnapi-759ddfcf78-9k9nf" Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.786867 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-947d869cc-4cx89" Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.821127 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-engine-d44cc5586-cqs7v" Dec 08 21:44:10 crc kubenswrapper[4791]: I1208 21:44:10.828380 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-759ddfcf78-9k9nf" Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.693862 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-api-84b7f8ffb-qdgth"] Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.708307 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-cfnapi-7c6cccb66c-d6kng"] Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.744158 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-cfnapi-675d76c787-jzkrg"] Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.746477 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-675d76c787-jzkrg" Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.749580 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-heat-cfnapi-internal-svc" Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.749580 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-heat-cfnapi-public-svc" Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.768106 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-api-59878569f5-swkzv"] Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.770146 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-api-59878569f5-swkzv" Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.775474 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-heat-api-internal-svc" Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.775897 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-heat-api-public-svc" Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.795273 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-675d76c787-jzkrg"] Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.808434 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/650a3079-dd09-461d-b647-bae2adac5ee6-internal-tls-certs\") pod \"heat-cfnapi-675d76c787-jzkrg\" (UID: \"650a3079-dd09-461d-b647-bae2adac5ee6\") " pod="openstack/heat-cfnapi-675d76c787-jzkrg" Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.808509 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad72828f-fcf7-494f-8ccf-384cde0ef6c9-internal-tls-certs\") pod \"heat-api-59878569f5-swkzv\" (UID: \"ad72828f-fcf7-494f-8ccf-384cde0ef6c9\") " pod="openstack/heat-api-59878569f5-swkzv" Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.808566 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pjr9w\" (UniqueName: \"kubernetes.io/projected/650a3079-dd09-461d-b647-bae2adac5ee6-kube-api-access-pjr9w\") pod \"heat-cfnapi-675d76c787-jzkrg\" (UID: \"650a3079-dd09-461d-b647-bae2adac5ee6\") " pod="openstack/heat-cfnapi-675d76c787-jzkrg" Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.808587 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/650a3079-dd09-461d-b647-bae2adac5ee6-combined-ca-bundle\") pod \"heat-cfnapi-675d76c787-jzkrg\" (UID: \"650a3079-dd09-461d-b647-bae2adac5ee6\") " pod="openstack/heat-cfnapi-675d76c787-jzkrg" Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.808610 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ad72828f-fcf7-494f-8ccf-384cde0ef6c9-config-data-custom\") pod \"heat-api-59878569f5-swkzv\" (UID: \"ad72828f-fcf7-494f-8ccf-384cde0ef6c9\") " pod="openstack/heat-api-59878569f5-swkzv" Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.808660 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/650a3079-dd09-461d-b647-bae2adac5ee6-config-data-custom\") pod \"heat-cfnapi-675d76c787-jzkrg\" (UID: \"650a3079-dd09-461d-b647-bae2adac5ee6\") " pod="openstack/heat-cfnapi-675d76c787-jzkrg" Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.808681 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad72828f-fcf7-494f-8ccf-384cde0ef6c9-config-data\") pod \"heat-api-59878569f5-swkzv\" (UID: \"ad72828f-fcf7-494f-8ccf-384cde0ef6c9\") " pod="openstack/heat-api-59878569f5-swkzv" Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.808732 4791 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2d466\" (UniqueName: \"kubernetes.io/projected/ad72828f-fcf7-494f-8ccf-384cde0ef6c9-kube-api-access-2d466\") pod \"heat-api-59878569f5-swkzv\" (UID: \"ad72828f-fcf7-494f-8ccf-384cde0ef6c9\") " pod="openstack/heat-api-59878569f5-swkzv" Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.808760 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/650a3079-dd09-461d-b647-bae2adac5ee6-config-data\") pod \"heat-cfnapi-675d76c787-jzkrg\" (UID: \"650a3079-dd09-461d-b647-bae2adac5ee6\") " pod="openstack/heat-cfnapi-675d76c787-jzkrg" Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.808777 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad72828f-fcf7-494f-8ccf-384cde0ef6c9-combined-ca-bundle\") pod \"heat-api-59878569f5-swkzv\" (UID: \"ad72828f-fcf7-494f-8ccf-384cde0ef6c9\") " pod="openstack/heat-api-59878569f5-swkzv" Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.808836 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad72828f-fcf7-494f-8ccf-384cde0ef6c9-public-tls-certs\") pod \"heat-api-59878569f5-swkzv\" (UID: \"ad72828f-fcf7-494f-8ccf-384cde0ef6c9\") " pod="openstack/heat-api-59878569f5-swkzv" Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.808867 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/650a3079-dd09-461d-b647-bae2adac5ee6-public-tls-certs\") pod \"heat-cfnapi-675d76c787-jzkrg\" (UID: \"650a3079-dd09-461d-b647-bae2adac5ee6\") " pod="openstack/heat-cfnapi-675d76c787-jzkrg" Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.811041 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-59878569f5-swkzv"] Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.910751 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/650a3079-dd09-461d-b647-bae2adac5ee6-internal-tls-certs\") pod \"heat-cfnapi-675d76c787-jzkrg\" (UID: \"650a3079-dd09-461d-b647-bae2adac5ee6\") " pod="openstack/heat-cfnapi-675d76c787-jzkrg" Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.910819 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad72828f-fcf7-494f-8ccf-384cde0ef6c9-internal-tls-certs\") pod \"heat-api-59878569f5-swkzv\" (UID: \"ad72828f-fcf7-494f-8ccf-384cde0ef6c9\") " pod="openstack/heat-api-59878569f5-swkzv" Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.910885 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pjr9w\" (UniqueName: \"kubernetes.io/projected/650a3079-dd09-461d-b647-bae2adac5ee6-kube-api-access-pjr9w\") pod \"heat-cfnapi-675d76c787-jzkrg\" (UID: \"650a3079-dd09-461d-b647-bae2adac5ee6\") " pod="openstack/heat-cfnapi-675d76c787-jzkrg" Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.910908 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/650a3079-dd09-461d-b647-bae2adac5ee6-combined-ca-bundle\") pod 
\"heat-cfnapi-675d76c787-jzkrg\" (UID: \"650a3079-dd09-461d-b647-bae2adac5ee6\") " pod="openstack/heat-cfnapi-675d76c787-jzkrg" Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.910929 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ad72828f-fcf7-494f-8ccf-384cde0ef6c9-config-data-custom\") pod \"heat-api-59878569f5-swkzv\" (UID: \"ad72828f-fcf7-494f-8ccf-384cde0ef6c9\") " pod="openstack/heat-api-59878569f5-swkzv" Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.910996 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/650a3079-dd09-461d-b647-bae2adac5ee6-config-data-custom\") pod \"heat-cfnapi-675d76c787-jzkrg\" (UID: \"650a3079-dd09-461d-b647-bae2adac5ee6\") " pod="openstack/heat-cfnapi-675d76c787-jzkrg" Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.911027 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad72828f-fcf7-494f-8ccf-384cde0ef6c9-config-data\") pod \"heat-api-59878569f5-swkzv\" (UID: \"ad72828f-fcf7-494f-8ccf-384cde0ef6c9\") " pod="openstack/heat-api-59878569f5-swkzv" Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.911070 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2d466\" (UniqueName: \"kubernetes.io/projected/ad72828f-fcf7-494f-8ccf-384cde0ef6c9-kube-api-access-2d466\") pod \"heat-api-59878569f5-swkzv\" (UID: \"ad72828f-fcf7-494f-8ccf-384cde0ef6c9\") " pod="openstack/heat-api-59878569f5-swkzv" Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.911098 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/650a3079-dd09-461d-b647-bae2adac5ee6-config-data\") pod \"heat-cfnapi-675d76c787-jzkrg\" (UID: \"650a3079-dd09-461d-b647-bae2adac5ee6\") " pod="openstack/heat-cfnapi-675d76c787-jzkrg" Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.911116 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad72828f-fcf7-494f-8ccf-384cde0ef6c9-combined-ca-bundle\") pod \"heat-api-59878569f5-swkzv\" (UID: \"ad72828f-fcf7-494f-8ccf-384cde0ef6c9\") " pod="openstack/heat-api-59878569f5-swkzv" Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.911171 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad72828f-fcf7-494f-8ccf-384cde0ef6c9-public-tls-certs\") pod \"heat-api-59878569f5-swkzv\" (UID: \"ad72828f-fcf7-494f-8ccf-384cde0ef6c9\") " pod="openstack/heat-api-59878569f5-swkzv" Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.911208 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/650a3079-dd09-461d-b647-bae2adac5ee6-public-tls-certs\") pod \"heat-cfnapi-675d76c787-jzkrg\" (UID: \"650a3079-dd09-461d-b647-bae2adac5ee6\") " pod="openstack/heat-cfnapi-675d76c787-jzkrg" Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.920352 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/650a3079-dd09-461d-b647-bae2adac5ee6-public-tls-certs\") pod \"heat-cfnapi-675d76c787-jzkrg\" (UID: 
\"650a3079-dd09-461d-b647-bae2adac5ee6\") " pod="openstack/heat-cfnapi-675d76c787-jzkrg" Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.920894 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/650a3079-dd09-461d-b647-bae2adac5ee6-config-data-custom\") pod \"heat-cfnapi-675d76c787-jzkrg\" (UID: \"650a3079-dd09-461d-b647-bae2adac5ee6\") " pod="openstack/heat-cfnapi-675d76c787-jzkrg" Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.927543 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad72828f-fcf7-494f-8ccf-384cde0ef6c9-combined-ca-bundle\") pod \"heat-api-59878569f5-swkzv\" (UID: \"ad72828f-fcf7-494f-8ccf-384cde0ef6c9\") " pod="openstack/heat-api-59878569f5-swkzv" Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.928155 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad72828f-fcf7-494f-8ccf-384cde0ef6c9-config-data\") pod \"heat-api-59878569f5-swkzv\" (UID: \"ad72828f-fcf7-494f-8ccf-384cde0ef6c9\") " pod="openstack/heat-api-59878569f5-swkzv" Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.928639 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/650a3079-dd09-461d-b647-bae2adac5ee6-internal-tls-certs\") pod \"heat-cfnapi-675d76c787-jzkrg\" (UID: \"650a3079-dd09-461d-b647-bae2adac5ee6\") " pod="openstack/heat-cfnapi-675d76c787-jzkrg" Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.928705 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ad72828f-fcf7-494f-8ccf-384cde0ef6c9-config-data-custom\") pod \"heat-api-59878569f5-swkzv\" (UID: \"ad72828f-fcf7-494f-8ccf-384cde0ef6c9\") " pod="openstack/heat-api-59878569f5-swkzv" Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.929550 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/650a3079-dd09-461d-b647-bae2adac5ee6-combined-ca-bundle\") pod \"heat-cfnapi-675d76c787-jzkrg\" (UID: \"650a3079-dd09-461d-b647-bae2adac5ee6\") " pod="openstack/heat-cfnapi-675d76c787-jzkrg" Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.929560 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad72828f-fcf7-494f-8ccf-384cde0ef6c9-internal-tls-certs\") pod \"heat-api-59878569f5-swkzv\" (UID: \"ad72828f-fcf7-494f-8ccf-384cde0ef6c9\") " pod="openstack/heat-api-59878569f5-swkzv" Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.930298 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/650a3079-dd09-461d-b647-bae2adac5ee6-config-data\") pod \"heat-cfnapi-675d76c787-jzkrg\" (UID: \"650a3079-dd09-461d-b647-bae2adac5ee6\") " pod="openstack/heat-cfnapi-675d76c787-jzkrg" Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.930447 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad72828f-fcf7-494f-8ccf-384cde0ef6c9-public-tls-certs\") pod \"heat-api-59878569f5-swkzv\" (UID: \"ad72828f-fcf7-494f-8ccf-384cde0ef6c9\") " pod="openstack/heat-api-59878569f5-swkzv" Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 
21:44:11.931195 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pjr9w\" (UniqueName: \"kubernetes.io/projected/650a3079-dd09-461d-b647-bae2adac5ee6-kube-api-access-pjr9w\") pod \"heat-cfnapi-675d76c787-jzkrg\" (UID: \"650a3079-dd09-461d-b647-bae2adac5ee6\") " pod="openstack/heat-cfnapi-675d76c787-jzkrg" Dec 08 21:44:11 crc kubenswrapper[4791]: I1208 21:44:11.931426 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2d466\" (UniqueName: \"kubernetes.io/projected/ad72828f-fcf7-494f-8ccf-384cde0ef6c9-kube-api-access-2d466\") pod \"heat-api-59878569f5-swkzv\" (UID: \"ad72828f-fcf7-494f-8ccf-384cde0ef6c9\") " pod="openstack/heat-api-59878569f5-swkzv" Dec 08 21:44:12 crc kubenswrapper[4791]: I1208 21:44:12.081513 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-675d76c787-jzkrg" Dec 08 21:44:12 crc kubenswrapper[4791]: I1208 21:44:12.112000 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-59878569f5-swkzv" Dec 08 21:44:13 crc kubenswrapper[4791]: I1208 21:44:13.710023 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-688b9f5b49-7hsq9" Dec 08 21:44:13 crc kubenswrapper[4791]: I1208 21:44:13.780146 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-fgpqc"] Dec 08 21:44:13 crc kubenswrapper[4791]: I1208 21:44:13.780491 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6578955fd5-fgpqc" podUID="8d81e17e-0d6f-461b-8bd9-8d277de96edd" containerName="dnsmasq-dns" containerID="cri-o://e231692aaf2208858357bb639ca4381b42ae68294b0ea0dbd48d3790c68d1aa6" gracePeriod=10 Dec 08 21:44:14 crc kubenswrapper[4791]: I1208 21:44:14.430501 4791 generic.go:334] "Generic (PLEG): container finished" podID="8d81e17e-0d6f-461b-8bd9-8d277de96edd" containerID="e231692aaf2208858357bb639ca4381b42ae68294b0ea0dbd48d3790c68d1aa6" exitCode=0 Dec 08 21:44:14 crc kubenswrapper[4791]: I1208 21:44:14.430586 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-fgpqc" event={"ID":"8d81e17e-0d6f-461b-8bd9-8d277de96edd","Type":"ContainerDied","Data":"e231692aaf2208858357bb639ca4381b42ae68294b0ea0dbd48d3790c68d1aa6"} Dec 08 21:44:14 crc kubenswrapper[4791]: I1208 21:44:14.592989 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-fgpqc" Dec 08 21:44:14 crc kubenswrapper[4791]: I1208 21:44:14.689991 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w82m\" (UniqueName: \"kubernetes.io/projected/8d81e17e-0d6f-461b-8bd9-8d277de96edd-kube-api-access-2w82m\") pod \"8d81e17e-0d6f-461b-8bd9-8d277de96edd\" (UID: \"8d81e17e-0d6f-461b-8bd9-8d277de96edd\") " Dec 08 21:44:14 crc kubenswrapper[4791]: I1208 21:44:14.690312 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d81e17e-0d6f-461b-8bd9-8d277de96edd-config\") pod \"8d81e17e-0d6f-461b-8bd9-8d277de96edd\" (UID: \"8d81e17e-0d6f-461b-8bd9-8d277de96edd\") " Dec 08 21:44:14 crc kubenswrapper[4791]: I1208 21:44:14.690525 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8d81e17e-0d6f-461b-8bd9-8d277de96edd-dns-swift-storage-0\") pod \"8d81e17e-0d6f-461b-8bd9-8d277de96edd\" (UID: \"8d81e17e-0d6f-461b-8bd9-8d277de96edd\") " Dec 08 21:44:14 crc kubenswrapper[4791]: I1208 21:44:14.690552 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8d81e17e-0d6f-461b-8bd9-8d277de96edd-ovsdbserver-nb\") pod \"8d81e17e-0d6f-461b-8bd9-8d277de96edd\" (UID: \"8d81e17e-0d6f-461b-8bd9-8d277de96edd\") " Dec 08 21:44:14 crc kubenswrapper[4791]: I1208 21:44:14.690618 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8d81e17e-0d6f-461b-8bd9-8d277de96edd-ovsdbserver-sb\") pod \"8d81e17e-0d6f-461b-8bd9-8d277de96edd\" (UID: \"8d81e17e-0d6f-461b-8bd9-8d277de96edd\") " Dec 08 21:44:14 crc kubenswrapper[4791]: I1208 21:44:14.690679 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8d81e17e-0d6f-461b-8bd9-8d277de96edd-dns-svc\") pod \"8d81e17e-0d6f-461b-8bd9-8d277de96edd\" (UID: \"8d81e17e-0d6f-461b-8bd9-8d277de96edd\") " Dec 08 21:44:14 crc kubenswrapper[4791]: I1208 21:44:14.768915 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d81e17e-0d6f-461b-8bd9-8d277de96edd-kube-api-access-2w82m" (OuterVolumeSpecName: "kube-api-access-2w82m") pod "8d81e17e-0d6f-461b-8bd9-8d277de96edd" (UID: "8d81e17e-0d6f-461b-8bd9-8d277de96edd"). InnerVolumeSpecName "kube-api-access-2w82m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:44:14 crc kubenswrapper[4791]: I1208 21:44:14.793272 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w82m\" (UniqueName: \"kubernetes.io/projected/8d81e17e-0d6f-461b-8bd9-8d277de96edd-kube-api-access-2w82m\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:15 crc kubenswrapper[4791]: I1208 21:44:15.042675 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-59878569f5-swkzv"] Dec 08 21:44:15 crc kubenswrapper[4791]: I1208 21:44:15.065664 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-759ddfcf78-9k9nf"] Dec 08 21:44:15 crc kubenswrapper[4791]: W1208 21:44:15.067111 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod650a3079_dd09_461d_b647_bae2adac5ee6.slice/crio-20e7f4dbde70554f4ea7e6d712b2d17e0e58b31369182f2080ac3a6b0efb1dbb WatchSource:0}: Error finding container 20e7f4dbde70554f4ea7e6d712b2d17e0e58b31369182f2080ac3a6b0efb1dbb: Status 404 returned error can't find the container with id 20e7f4dbde70554f4ea7e6d712b2d17e0e58b31369182f2080ac3a6b0efb1dbb Dec 08 21:44:15 crc kubenswrapper[4791]: I1208 21:44:15.078230 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-675d76c787-jzkrg"] Dec 08 21:44:15 crc kubenswrapper[4791]: I1208 21:44:15.198330 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8d81e17e-0d6f-461b-8bd9-8d277de96edd-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8d81e17e-0d6f-461b-8bd9-8d277de96edd" (UID: "8d81e17e-0d6f-461b-8bd9-8d277de96edd"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:44:15 crc kubenswrapper[4791]: I1208 21:44:15.203476 4791 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8d81e17e-0d6f-461b-8bd9-8d277de96edd-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:15 crc kubenswrapper[4791]: I1208 21:44:15.207783 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8d81e17e-0d6f-461b-8bd9-8d277de96edd-config" (OuterVolumeSpecName: "config") pod "8d81e17e-0d6f-461b-8bd9-8d277de96edd" (UID: "8d81e17e-0d6f-461b-8bd9-8d277de96edd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:44:15 crc kubenswrapper[4791]: I1208 21:44:15.235117 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8d81e17e-0d6f-461b-8bd9-8d277de96edd-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8d81e17e-0d6f-461b-8bd9-8d277de96edd" (UID: "8d81e17e-0d6f-461b-8bd9-8d277de96edd"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:44:15 crc kubenswrapper[4791]: I1208 21:44:15.298900 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8d81e17e-0d6f-461b-8bd9-8d277de96edd-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "8d81e17e-0d6f-461b-8bd9-8d277de96edd" (UID: "8d81e17e-0d6f-461b-8bd9-8d277de96edd"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:44:15 crc kubenswrapper[4791]: I1208 21:44:15.312845 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d81e17e-0d6f-461b-8bd9-8d277de96edd-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:15 crc kubenswrapper[4791]: I1208 21:44:15.312910 4791 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8d81e17e-0d6f-461b-8bd9-8d277de96edd-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:15 crc kubenswrapper[4791]: I1208 21:44:15.312926 4791 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8d81e17e-0d6f-461b-8bd9-8d277de96edd-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:15 crc kubenswrapper[4791]: W1208 21:44:15.334117 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7ba96aae_1964_4b6a_b1dd_204997df9230.slice/crio-8171d7437b85e90c5a4954d650bde91ccb4af1c484f82163a8c2fd162b789ed0 WatchSource:0}: Error finding container 8171d7437b85e90c5a4954d650bde91ccb4af1c484f82163a8c2fd162b789ed0: Status 404 returned error can't find the container with id 8171d7437b85e90c5a4954d650bde91ccb4af1c484f82163a8c2fd162b789ed0 Dec 08 21:44:15 crc kubenswrapper[4791]: I1208 21:44:15.339853 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-bm9lh"] Dec 08 21:44:15 crc kubenswrapper[4791]: I1208 21:44:15.345136 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8d81e17e-0d6f-461b-8bd9-8d277de96edd-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8d81e17e-0d6f-461b-8bd9-8d277de96edd" (UID: "8d81e17e-0d6f-461b-8bd9-8d277de96edd"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:44:15 crc kubenswrapper[4791]: I1208 21:44:15.358815 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-947d869cc-4cx89"] Dec 08 21:44:15 crc kubenswrapper[4791]: I1208 21:44:15.384328 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-d44cc5586-cqs7v"] Dec 08 21:44:15 crc kubenswrapper[4791]: I1208 21:44:15.419128 4791 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8d81e17e-0d6f-461b-8bd9-8d277de96edd-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:15 crc kubenswrapper[4791]: I1208 21:44:15.458852 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-59878569f5-swkzv" event={"ID":"ad72828f-fcf7-494f-8ccf-384cde0ef6c9","Type":"ContainerStarted","Data":"09b1b0983c39e3dad3d1daf289fe4acc3b6e384d3caa90c4a47f2e306ca27c08"} Dec 08 21:44:15 crc kubenswrapper[4791]: I1208 21:44:15.485136 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-84b7f8ffb-qdgth" event={"ID":"51be1224-1eae-451d-9bac-6ab628fded83","Type":"ContainerStarted","Data":"b8c35b3d17975f605c4966dd5979ccb3a1b526daad6e2abe4e44e58786608a8e"} Dec 08 21:44:15 crc kubenswrapper[4791]: I1208 21:44:15.485370 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/heat-api-84b7f8ffb-qdgth" podUID="51be1224-1eae-451d-9bac-6ab628fded83" containerName="heat-api" containerID="cri-o://b8c35b3d17975f605c4966dd5979ccb3a1b526daad6e2abe4e44e58786608a8e" gracePeriod=60 Dec 08 21:44:15 crc kubenswrapper[4791]: I1208 21:44:15.485782 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-api-84b7f8ffb-qdgth" Dec 08 21:44:15 crc kubenswrapper[4791]: I1208 21:44:15.533971 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-947d869cc-4cx89" event={"ID":"7ba96aae-1964-4b6a-b1dd-204997df9230","Type":"ContainerStarted","Data":"8171d7437b85e90c5a4954d650bde91ccb4af1c484f82163a8c2fd162b789ed0"} Dec 08 21:44:15 crc kubenswrapper[4791]: I1208 21:44:15.555398 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-bm9lh" event={"ID":"e02fffe8-6208-48f0-ba89-6d54f07f5ae4","Type":"ContainerStarted","Data":"e3574a870767712005dae8df85840379afd67a3f752a5905d1b67282897c62bf"} Dec 08 21:44:15 crc kubenswrapper[4791]: I1208 21:44:15.563031 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-d44cc5586-cqs7v" event={"ID":"c5036ceb-802c-446d-ac98-a56f732e25d9","Type":"ContainerStarted","Data":"0ddb9d3f36981cf6aa54a02a436736c5351924dfc554f6ac80445024a69331aa"} Dec 08 21:44:15 crc kubenswrapper[4791]: I1208 21:44:15.577896 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-api-84b7f8ffb-qdgth" podStartSLOduration=3.097456651 podStartE2EDuration="12.577873269s" podCreationTimestamp="2025-12-08 21:44:03 +0000 UTC" firstStartedPulling="2025-12-08 21:44:04.740608092 +0000 UTC m=+1521.439366437" lastFinishedPulling="2025-12-08 21:44:14.22102471 +0000 UTC m=+1530.919783055" observedRunningTime="2025-12-08 21:44:15.548151268 +0000 UTC m=+1532.246909633" watchObservedRunningTime="2025-12-08 21:44:15.577873269 +0000 UTC m=+1532.276631614" Dec 08 21:44:15 crc kubenswrapper[4791]: I1208 21:44:15.592959 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-fgpqc" 
event={"ID":"8d81e17e-0d6f-461b-8bd9-8d277de96edd","Type":"ContainerDied","Data":"b0492c1345a2932fdd3a479cf81fcbea6376237cc4fc7fb76cc6a9230336f0ec"} Dec 08 21:44:15 crc kubenswrapper[4791]: I1208 21:44:15.593024 4791 scope.go:117] "RemoveContainer" containerID="e231692aaf2208858357bb639ca4381b42ae68294b0ea0dbd48d3790c68d1aa6" Dec 08 21:44:15 crc kubenswrapper[4791]: I1208 21:44:15.593182 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-fgpqc" Dec 08 21:44:15 crc kubenswrapper[4791]: I1208 21:44:15.611881 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/heat-cfnapi-7c6cccb66c-d6kng" podUID="b4ae5100-9cc7-462c-8424-d8837d636fa3" containerName="heat-cfnapi" containerID="cri-o://e959ebc768a3bbd9021396f2e57a7765d04f0e4835b94174715541950afc8b90" gracePeriod=60 Dec 08 21:44:15 crc kubenswrapper[4791]: I1208 21:44:15.679022 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-7c6cccb66c-d6kng" event={"ID":"b4ae5100-9cc7-462c-8424-d8837d636fa3","Type":"ContainerStarted","Data":"e959ebc768a3bbd9021396f2e57a7765d04f0e4835b94174715541950afc8b90"} Dec 08 21:44:15 crc kubenswrapper[4791]: I1208 21:44:15.679067 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-759ddfcf78-9k9nf" event={"ID":"997b752d-4e35-4780-a978-35cda6d832fa","Type":"ContainerStarted","Data":"b772c57c3c9de5b2a8e092f215b004092e57845b485071dea7200fce274574a6"} Dec 08 21:44:15 crc kubenswrapper[4791]: I1208 21:44:15.679084 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-cfnapi-7c6cccb66c-d6kng" Dec 08 21:44:15 crc kubenswrapper[4791]: I1208 21:44:15.679094 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"79f21b28-25e8-4260-a133-910ab353ed8c","Type":"ContainerStarted","Data":"ae8fc77d564d7e978eb8595534e08e2e8c13bbb39688276f799efc8b27583a82"} Dec 08 21:44:15 crc kubenswrapper[4791]: I1208 21:44:15.695870 4791 scope.go:117] "RemoveContainer" containerID="d03ecd51440fbad4af5d9acedc54533235477560f6e635611fcf91fe8f7f253a" Dec 08 21:44:15 crc kubenswrapper[4791]: I1208 21:44:15.696048 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-675d76c787-jzkrg" event={"ID":"650a3079-dd09-461d-b647-bae2adac5ee6","Type":"ContainerStarted","Data":"20e7f4dbde70554f4ea7e6d712b2d17e0e58b31369182f2080ac3a6b0efb1dbb"} Dec 08 21:44:15 crc kubenswrapper[4791]: I1208 21:44:15.720596 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-cfnapi-7c6cccb66c-d6kng" podStartSLOduration=3.130278867 podStartE2EDuration="12.720569627s" podCreationTimestamp="2025-12-08 21:44:03 +0000 UTC" firstStartedPulling="2025-12-08 21:44:04.624566209 +0000 UTC m=+1521.323324554" lastFinishedPulling="2025-12-08 21:44:14.214856969 +0000 UTC m=+1530.913615314" observedRunningTime="2025-12-08 21:44:15.659436364 +0000 UTC m=+1532.358194709" watchObservedRunningTime="2025-12-08 21:44:15.720569627 +0000 UTC m=+1532.419327982" Dec 08 21:44:15 crc kubenswrapper[4791]: I1208 21:44:15.741640 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=3.992791982 podStartE2EDuration="20.741615944s" podCreationTimestamp="2025-12-08 21:43:55 +0000 UTC" firstStartedPulling="2025-12-08 21:43:57.423560432 +0000 UTC m=+1514.122318777" lastFinishedPulling="2025-12-08 21:44:14.172384384 +0000 UTC 
m=+1530.871142739" observedRunningTime="2025-12-08 21:44:15.724110764 +0000 UTC m=+1532.422869129" watchObservedRunningTime="2025-12-08 21:44:15.741615944 +0000 UTC m=+1532.440374289" Dec 08 21:44:15 crc kubenswrapper[4791]: I1208 21:44:15.963671 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-fgpqc"] Dec 08 21:44:15 crc kubenswrapper[4791]: I1208 21:44:15.977110 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-fgpqc"] Dec 08 21:44:16 crc kubenswrapper[4791]: I1208 21:44:16.711355 4791 generic.go:334] "Generic (PLEG): container finished" podID="997b752d-4e35-4780-a978-35cda6d832fa" containerID="a76d075979477748f13253e368994b2ceec3e69322c63acc19e37e141b7d467a" exitCode=1 Dec 08 21:44:16 crc kubenswrapper[4791]: I1208 21:44:16.711500 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-759ddfcf78-9k9nf" event={"ID":"997b752d-4e35-4780-a978-35cda6d832fa","Type":"ContainerDied","Data":"a76d075979477748f13253e368994b2ceec3e69322c63acc19e37e141b7d467a"} Dec 08 21:44:16 crc kubenswrapper[4791]: I1208 21:44:16.712361 4791 scope.go:117] "RemoveContainer" containerID="a76d075979477748f13253e368994b2ceec3e69322c63acc19e37e141b7d467a" Dec 08 21:44:16 crc kubenswrapper[4791]: I1208 21:44:16.716649 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-d44cc5586-cqs7v" event={"ID":"c5036ceb-802c-446d-ac98-a56f732e25d9","Type":"ContainerStarted","Data":"92fedb1141952e1caa2f498169344fb4c3c9184e66910d8132bb6ed49f6a601d"} Dec 08 21:44:16 crc kubenswrapper[4791]: I1208 21:44:16.717586 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-engine-d44cc5586-cqs7v" Dec 08 21:44:16 crc kubenswrapper[4791]: I1208 21:44:16.720466 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-675d76c787-jzkrg" event={"ID":"650a3079-dd09-461d-b647-bae2adac5ee6","Type":"ContainerStarted","Data":"6c17ac97144ba1e49f4a4f124de8a43a6f04526bb2fe512d297bd77271404582"} Dec 08 21:44:16 crc kubenswrapper[4791]: I1208 21:44:16.721531 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-cfnapi-675d76c787-jzkrg" Dec 08 21:44:16 crc kubenswrapper[4791]: I1208 21:44:16.723656 4791 generic.go:334] "Generic (PLEG): container finished" podID="7ba96aae-1964-4b6a-b1dd-204997df9230" containerID="48dc6283c55a075ef677199f2141a5aae4771d60f67891a600bafc5b5b6e8b42" exitCode=1 Dec 08 21:44:16 crc kubenswrapper[4791]: I1208 21:44:16.723737 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-947d869cc-4cx89" event={"ID":"7ba96aae-1964-4b6a-b1dd-204997df9230","Type":"ContainerDied","Data":"48dc6283c55a075ef677199f2141a5aae4771d60f67891a600bafc5b5b6e8b42"} Dec 08 21:44:16 crc kubenswrapper[4791]: I1208 21:44:16.724146 4791 scope.go:117] "RemoveContainer" containerID="48dc6283c55a075ef677199f2141a5aae4771d60f67891a600bafc5b5b6e8b42" Dec 08 21:44:16 crc kubenswrapper[4791]: I1208 21:44:16.737307 4791 generic.go:334] "Generic (PLEG): container finished" podID="b4ae5100-9cc7-462c-8424-d8837d636fa3" containerID="e959ebc768a3bbd9021396f2e57a7765d04f0e4835b94174715541950afc8b90" exitCode=0 Dec 08 21:44:16 crc kubenswrapper[4791]: I1208 21:44:16.737474 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-7c6cccb66c-d6kng" event={"ID":"b4ae5100-9cc7-462c-8424-d8837d636fa3","Type":"ContainerDied","Data":"e959ebc768a3bbd9021396f2e57a7765d04f0e4835b94174715541950afc8b90"} 
Dec 08 21:44:16 crc kubenswrapper[4791]: I1208 21:44:16.745825 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-59878569f5-swkzv" event={"ID":"ad72828f-fcf7-494f-8ccf-384cde0ef6c9","Type":"ContainerStarted","Data":"38b0737490cd6290873c2f3a488c8b8ce860e2c9ff63eaade932b49313131b41"} Dec 08 21:44:16 crc kubenswrapper[4791]: I1208 21:44:16.746390 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-api-59878569f5-swkzv" Dec 08 21:44:16 crc kubenswrapper[4791]: I1208 21:44:16.769611 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-cfnapi-675d76c787-jzkrg" podStartSLOduration=5.769593667 podStartE2EDuration="5.769593667s" podCreationTimestamp="2025-12-08 21:44:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:44:16.763755473 +0000 UTC m=+1533.462513818" watchObservedRunningTime="2025-12-08 21:44:16.769593667 +0000 UTC m=+1533.468352002" Dec 08 21:44:16 crc kubenswrapper[4791]: I1208 21:44:16.800337 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-engine-d44cc5586-cqs7v" podStartSLOduration=6.800312612 podStartE2EDuration="6.800312612s" podCreationTimestamp="2025-12-08 21:44:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:44:16.78273982 +0000 UTC m=+1533.481498165" watchObservedRunningTime="2025-12-08 21:44:16.800312612 +0000 UTC m=+1533.499070967" Dec 08 21:44:16 crc kubenswrapper[4791]: I1208 21:44:16.840624 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-api-59878569f5-swkzv" podStartSLOduration=5.840593482 podStartE2EDuration="5.840593482s" podCreationTimestamp="2025-12-08 21:44:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:44:16.829904679 +0000 UTC m=+1533.528663044" watchObservedRunningTime="2025-12-08 21:44:16.840593482 +0000 UTC m=+1533.539351827" Dec 08 21:44:17 crc kubenswrapper[4791]: I1208 21:44:17.385605 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-cfnapi-7c6cccb66c-d6kng" Dec 08 21:44:17 crc kubenswrapper[4791]: I1208 21:44:17.487008 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b4ae5100-9cc7-462c-8424-d8837d636fa3-config-data-custom\") pod \"b4ae5100-9cc7-462c-8424-d8837d636fa3\" (UID: \"b4ae5100-9cc7-462c-8424-d8837d636fa3\") " Dec 08 21:44:17 crc kubenswrapper[4791]: I1208 21:44:17.487358 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4ae5100-9cc7-462c-8424-d8837d636fa3-config-data\") pod \"b4ae5100-9cc7-462c-8424-d8837d636fa3\" (UID: \"b4ae5100-9cc7-462c-8424-d8837d636fa3\") " Dec 08 21:44:17 crc kubenswrapper[4791]: I1208 21:44:17.487576 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4ae5100-9cc7-462c-8424-d8837d636fa3-combined-ca-bundle\") pod \"b4ae5100-9cc7-462c-8424-d8837d636fa3\" (UID: \"b4ae5100-9cc7-462c-8424-d8837d636fa3\") " Dec 08 21:44:17 crc kubenswrapper[4791]: I1208 21:44:17.487856 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-929lj\" (UniqueName: \"kubernetes.io/projected/b4ae5100-9cc7-462c-8424-d8837d636fa3-kube-api-access-929lj\") pod \"b4ae5100-9cc7-462c-8424-d8837d636fa3\" (UID: \"b4ae5100-9cc7-462c-8424-d8837d636fa3\") " Dec 08 21:44:17 crc kubenswrapper[4791]: I1208 21:44:17.493887 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b4ae5100-9cc7-462c-8424-d8837d636fa3-kube-api-access-929lj" (OuterVolumeSpecName: "kube-api-access-929lj") pod "b4ae5100-9cc7-462c-8424-d8837d636fa3" (UID: "b4ae5100-9cc7-462c-8424-d8837d636fa3"). InnerVolumeSpecName "kube-api-access-929lj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:44:17 crc kubenswrapper[4791]: I1208 21:44:17.494348 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4ae5100-9cc7-462c-8424-d8837d636fa3-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "b4ae5100-9cc7-462c-8424-d8837d636fa3" (UID: "b4ae5100-9cc7-462c-8424-d8837d636fa3"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:44:17 crc kubenswrapper[4791]: I1208 21:44:17.533299 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4ae5100-9cc7-462c-8424-d8837d636fa3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b4ae5100-9cc7-462c-8424-d8837d636fa3" (UID: "b4ae5100-9cc7-462c-8424-d8837d636fa3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:44:17 crc kubenswrapper[4791]: I1208 21:44:17.557960 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4ae5100-9cc7-462c-8424-d8837d636fa3-config-data" (OuterVolumeSpecName: "config-data") pod "b4ae5100-9cc7-462c-8424-d8837d636fa3" (UID: "b4ae5100-9cc7-462c-8424-d8837d636fa3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:44:17 crc kubenswrapper[4791]: I1208 21:44:17.594533 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-929lj\" (UniqueName: \"kubernetes.io/projected/b4ae5100-9cc7-462c-8424-d8837d636fa3-kube-api-access-929lj\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:17 crc kubenswrapper[4791]: I1208 21:44:17.594610 4791 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b4ae5100-9cc7-462c-8424-d8837d636fa3-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:17 crc kubenswrapper[4791]: I1208 21:44:17.594625 4791 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4ae5100-9cc7-462c-8424-d8837d636fa3-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:17 crc kubenswrapper[4791]: I1208 21:44:17.594639 4791 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4ae5100-9cc7-462c-8424-d8837d636fa3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:17 crc kubenswrapper[4791]: I1208 21:44:17.616356 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d81e17e-0d6f-461b-8bd9-8d277de96edd" path="/var/lib/kubelet/pods/8d81e17e-0d6f-461b-8bd9-8d277de96edd/volumes" Dec 08 21:44:17 crc kubenswrapper[4791]: I1208 21:44:17.759725 4791 generic.go:334] "Generic (PLEG): container finished" podID="7ba96aae-1964-4b6a-b1dd-204997df9230" containerID="f119605dbc8ad2dd193cbfa63933c8b916190d7ec566c77823b6d086e0a299fa" exitCode=1 Dec 08 21:44:17 crc kubenswrapper[4791]: I1208 21:44:17.759829 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-947d869cc-4cx89" event={"ID":"7ba96aae-1964-4b6a-b1dd-204997df9230","Type":"ContainerDied","Data":"f119605dbc8ad2dd193cbfa63933c8b916190d7ec566c77823b6d086e0a299fa"} Dec 08 21:44:17 crc kubenswrapper[4791]: I1208 21:44:17.759902 4791 scope.go:117] "RemoveContainer" containerID="48dc6283c55a075ef677199f2141a5aae4771d60f67891a600bafc5b5b6e8b42" Dec 08 21:44:17 crc kubenswrapper[4791]: I1208 21:44:17.760840 4791 scope.go:117] "RemoveContainer" containerID="f119605dbc8ad2dd193cbfa63933c8b916190d7ec566c77823b6d086e0a299fa" Dec 08 21:44:17 crc kubenswrapper[4791]: E1208 21:44:17.761352 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-api\" with CrashLoopBackOff: \"back-off 10s restarting failed container=heat-api pod=heat-api-947d869cc-4cx89_openstack(7ba96aae-1964-4b6a-b1dd-204997df9230)\"" pod="openstack/heat-api-947d869cc-4cx89" podUID="7ba96aae-1964-4b6a-b1dd-204997df9230" Dec 08 21:44:17 crc kubenswrapper[4791]: I1208 21:44:17.762387 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-7c6cccb66c-d6kng" event={"ID":"b4ae5100-9cc7-462c-8424-d8837d636fa3","Type":"ContainerDied","Data":"2d3b244054dcdbafffea80b1502f283602a5b10dbb17ad9a2e38da4a702334da"} Dec 08 21:44:17 crc kubenswrapper[4791]: I1208 21:44:17.762462 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-cfnapi-7c6cccb66c-d6kng" Dec 08 21:44:17 crc kubenswrapper[4791]: I1208 21:44:17.766337 4791 generic.go:334] "Generic (PLEG): container finished" podID="997b752d-4e35-4780-a978-35cda6d832fa" containerID="9374abbf86dacdc6560893cf2d338b2b11dbfa3ec5ac9f799643dd2e1c84309d" exitCode=1 Dec 08 21:44:17 crc kubenswrapper[4791]: I1208 21:44:17.767625 4791 scope.go:117] "RemoveContainer" containerID="9374abbf86dacdc6560893cf2d338b2b11dbfa3ec5ac9f799643dd2e1c84309d" Dec 08 21:44:17 crc kubenswrapper[4791]: E1208 21:44:17.767943 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-cfnapi\" with CrashLoopBackOff: \"back-off 10s restarting failed container=heat-cfnapi pod=heat-cfnapi-759ddfcf78-9k9nf_openstack(997b752d-4e35-4780-a978-35cda6d832fa)\"" pod="openstack/heat-cfnapi-759ddfcf78-9k9nf" podUID="997b752d-4e35-4780-a978-35cda6d832fa" Dec 08 21:44:17 crc kubenswrapper[4791]: I1208 21:44:17.768126 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-759ddfcf78-9k9nf" event={"ID":"997b752d-4e35-4780-a978-35cda6d832fa","Type":"ContainerDied","Data":"9374abbf86dacdc6560893cf2d338b2b11dbfa3ec5ac9f799643dd2e1c84309d"} Dec 08 21:44:17 crc kubenswrapper[4791]: I1208 21:44:17.845768 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-cfnapi-7c6cccb66c-d6kng"] Dec 08 21:44:17 crc kubenswrapper[4791]: I1208 21:44:17.848393 4791 scope.go:117] "RemoveContainer" containerID="e959ebc768a3bbd9021396f2e57a7765d04f0e4835b94174715541950afc8b90" Dec 08 21:44:17 crc kubenswrapper[4791]: I1208 21:44:17.858062 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-cfnapi-7c6cccb66c-d6kng"] Dec 08 21:44:17 crc kubenswrapper[4791]: I1208 21:44:17.871330 4791 scope.go:117] "RemoveContainer" containerID="a76d075979477748f13253e368994b2ceec3e69322c63acc19e37e141b7d467a" Dec 08 21:44:18 crc kubenswrapper[4791]: I1208 21:44:18.787068 4791 scope.go:117] "RemoveContainer" containerID="9374abbf86dacdc6560893cf2d338b2b11dbfa3ec5ac9f799643dd2e1c84309d" Dec 08 21:44:18 crc kubenswrapper[4791]: E1208 21:44:18.787319 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-cfnapi\" with CrashLoopBackOff: \"back-off 10s restarting failed container=heat-cfnapi pod=heat-cfnapi-759ddfcf78-9k9nf_openstack(997b752d-4e35-4780-a978-35cda6d832fa)\"" pod="openstack/heat-cfnapi-759ddfcf78-9k9nf" podUID="997b752d-4e35-4780-a978-35cda6d832fa" Dec 08 21:44:18 crc kubenswrapper[4791]: I1208 21:44:18.800080 4791 scope.go:117] "RemoveContainer" containerID="f119605dbc8ad2dd193cbfa63933c8b916190d7ec566c77823b6d086e0a299fa" Dec 08 21:44:18 crc kubenswrapper[4791]: E1208 21:44:18.800706 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-api\" with CrashLoopBackOff: \"back-off 10s restarting failed container=heat-api pod=heat-api-947d869cc-4cx89_openstack(7ba96aae-1964-4b6a-b1dd-204997df9230)\"" pod="openstack/heat-api-947d869cc-4cx89" podUID="7ba96aae-1964-4b6a-b1dd-204997df9230" Dec 08 21:44:19 crc kubenswrapper[4791]: I1208 21:44:19.611845 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b4ae5100-9cc7-462c-8424-d8837d636fa3" path="/var/lib/kubelet/pods/b4ae5100-9cc7-462c-8424-d8837d636fa3/volumes" Dec 08 21:44:20 crc kubenswrapper[4791]: I1208 21:44:20.788882 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-api-947d869cc-4cx89" Dec 08 
21:44:20 crc kubenswrapper[4791]: I1208 21:44:20.789747 4791 scope.go:117] "RemoveContainer" containerID="f119605dbc8ad2dd193cbfa63933c8b916190d7ec566c77823b6d086e0a299fa" Dec 08 21:44:20 crc kubenswrapper[4791]: E1208 21:44:20.789983 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-api\" with CrashLoopBackOff: \"back-off 10s restarting failed container=heat-api pod=heat-api-947d869cc-4cx89_openstack(7ba96aae-1964-4b6a-b1dd-204997df9230)\"" pod="openstack/heat-api-947d869cc-4cx89" podUID="7ba96aae-1964-4b6a-b1dd-204997df9230" Dec 08 21:44:20 crc kubenswrapper[4791]: I1208 21:44:20.790827 4791 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/heat-api-947d869cc-4cx89" Dec 08 21:44:20 crc kubenswrapper[4791]: I1208 21:44:20.820384 4791 scope.go:117] "RemoveContainer" containerID="f119605dbc8ad2dd193cbfa63933c8b916190d7ec566c77823b6d086e0a299fa" Dec 08 21:44:20 crc kubenswrapper[4791]: E1208 21:44:20.820895 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-api\" with CrashLoopBackOff: \"back-off 10s restarting failed container=heat-api pod=heat-api-947d869cc-4cx89_openstack(7ba96aae-1964-4b6a-b1dd-204997df9230)\"" pod="openstack/heat-api-947d869cc-4cx89" podUID="7ba96aae-1964-4b6a-b1dd-204997df9230" Dec 08 21:44:20 crc kubenswrapper[4791]: I1208 21:44:20.829026 4791 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/heat-cfnapi-759ddfcf78-9k9nf" Dec 08 21:44:20 crc kubenswrapper[4791]: I1208 21:44:20.829398 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-cfnapi-759ddfcf78-9k9nf" Dec 08 21:44:20 crc kubenswrapper[4791]: I1208 21:44:20.830322 4791 scope.go:117] "RemoveContainer" containerID="9374abbf86dacdc6560893cf2d338b2b11dbfa3ec5ac9f799643dd2e1c84309d" Dec 08 21:44:20 crc kubenswrapper[4791]: E1208 21:44:20.831013 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-cfnapi\" with CrashLoopBackOff: \"back-off 10s restarting failed container=heat-cfnapi pod=heat-cfnapi-759ddfcf78-9k9nf_openstack(997b752d-4e35-4780-a978-35cda6d832fa)\"" pod="openstack/heat-cfnapi-759ddfcf78-9k9nf" podUID="997b752d-4e35-4780-a978-35cda6d832fa" Dec 08 21:44:23 crc kubenswrapper[4791]: I1208 21:44:23.430672 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-engine-6fdc69876f-c67rs" Dec 08 21:44:24 crc kubenswrapper[4791]: I1208 21:44:24.401306 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-cfnapi-675d76c787-jzkrg" Dec 08 21:44:24 crc kubenswrapper[4791]: I1208 21:44:24.480036 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-cfnapi-759ddfcf78-9k9nf"] Dec 08 21:44:24 crc kubenswrapper[4791]: I1208 21:44:24.779910 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-api-59878569f5-swkzv" Dec 08 21:44:24 crc kubenswrapper[4791]: I1208 21:44:24.878323 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-api-947d869cc-4cx89"] Dec 08 21:44:26 crc kubenswrapper[4791]: I1208 21:44:26.313817 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-api-84b7f8ffb-qdgth" Dec 08 21:44:27 crc kubenswrapper[4791]: I1208 21:44:27.696532 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-api-947d869cc-4cx89" Dec 08 21:44:27 crc kubenswrapper[4791]: I1208 21:44:27.704266 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-759ddfcf78-9k9nf" Dec 08 21:44:27 crc kubenswrapper[4791]: I1208 21:44:27.803261 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/997b752d-4e35-4780-a978-35cda6d832fa-combined-ca-bundle\") pod \"997b752d-4e35-4780-a978-35cda6d832fa\" (UID: \"997b752d-4e35-4780-a978-35cda6d832fa\") " Dec 08 21:44:27 crc kubenswrapper[4791]: I1208 21:44:27.803306 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ba96aae-1964-4b6a-b1dd-204997df9230-config-data\") pod \"7ba96aae-1964-4b6a-b1dd-204997df9230\" (UID: \"7ba96aae-1964-4b6a-b1dd-204997df9230\") " Dec 08 21:44:27 crc kubenswrapper[4791]: I1208 21:44:27.803344 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gz97t\" (UniqueName: \"kubernetes.io/projected/7ba96aae-1964-4b6a-b1dd-204997df9230-kube-api-access-gz97t\") pod \"7ba96aae-1964-4b6a-b1dd-204997df9230\" (UID: \"7ba96aae-1964-4b6a-b1dd-204997df9230\") " Dec 08 21:44:27 crc kubenswrapper[4791]: I1208 21:44:27.803421 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6v5qw\" (UniqueName: \"kubernetes.io/projected/997b752d-4e35-4780-a978-35cda6d832fa-kube-api-access-6v5qw\") pod \"997b752d-4e35-4780-a978-35cda6d832fa\" (UID: \"997b752d-4e35-4780-a978-35cda6d832fa\") " Dec 08 21:44:27 crc kubenswrapper[4791]: I1208 21:44:27.803470 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7ba96aae-1964-4b6a-b1dd-204997df9230-config-data-custom\") pod \"7ba96aae-1964-4b6a-b1dd-204997df9230\" (UID: \"7ba96aae-1964-4b6a-b1dd-204997df9230\") " Dec 08 21:44:27 crc kubenswrapper[4791]: I1208 21:44:27.803546 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/997b752d-4e35-4780-a978-35cda6d832fa-config-data\") pod \"997b752d-4e35-4780-a978-35cda6d832fa\" (UID: \"997b752d-4e35-4780-a978-35cda6d832fa\") " Dec 08 21:44:27 crc kubenswrapper[4791]: I1208 21:44:27.803591 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ba96aae-1964-4b6a-b1dd-204997df9230-combined-ca-bundle\") pod \"7ba96aae-1964-4b6a-b1dd-204997df9230\" (UID: \"7ba96aae-1964-4b6a-b1dd-204997df9230\") " Dec 08 21:44:27 crc kubenswrapper[4791]: I1208 21:44:27.803649 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/997b752d-4e35-4780-a978-35cda6d832fa-config-data-custom\") pod \"997b752d-4e35-4780-a978-35cda6d832fa\" (UID: \"997b752d-4e35-4780-a978-35cda6d832fa\") " Dec 08 21:44:27 crc kubenswrapper[4791]: I1208 21:44:27.824055 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/997b752d-4e35-4780-a978-35cda6d832fa-kube-api-access-6v5qw" (OuterVolumeSpecName: "kube-api-access-6v5qw") pod "997b752d-4e35-4780-a978-35cda6d832fa" (UID: "997b752d-4e35-4780-a978-35cda6d832fa"). InnerVolumeSpecName "kube-api-access-6v5qw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:44:27 crc kubenswrapper[4791]: I1208 21:44:27.835119 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/997b752d-4e35-4780-a978-35cda6d832fa-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "997b752d-4e35-4780-a978-35cda6d832fa" (UID: "997b752d-4e35-4780-a978-35cda6d832fa"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:44:27 crc kubenswrapper[4791]: I1208 21:44:27.864944 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ba96aae-1964-4b6a-b1dd-204997df9230-kube-api-access-gz97t" (OuterVolumeSpecName: "kube-api-access-gz97t") pod "7ba96aae-1964-4b6a-b1dd-204997df9230" (UID: "7ba96aae-1964-4b6a-b1dd-204997df9230"). InnerVolumeSpecName "kube-api-access-gz97t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:44:27 crc kubenswrapper[4791]: I1208 21:44:27.865223 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ba96aae-1964-4b6a-b1dd-204997df9230-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "7ba96aae-1964-4b6a-b1dd-204997df9230" (UID: "7ba96aae-1964-4b6a-b1dd-204997df9230"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:44:27 crc kubenswrapper[4791]: I1208 21:44:27.907459 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gz97t\" (UniqueName: \"kubernetes.io/projected/7ba96aae-1964-4b6a-b1dd-204997df9230-kube-api-access-gz97t\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:27 crc kubenswrapper[4791]: I1208 21:44:27.907499 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6v5qw\" (UniqueName: \"kubernetes.io/projected/997b752d-4e35-4780-a978-35cda6d832fa-kube-api-access-6v5qw\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:27 crc kubenswrapper[4791]: I1208 21:44:27.907511 4791 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7ba96aae-1964-4b6a-b1dd-204997df9230-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:27 crc kubenswrapper[4791]: I1208 21:44:27.907523 4791 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/997b752d-4e35-4780-a978-35cda6d832fa-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:27 crc kubenswrapper[4791]: I1208 21:44:27.911138 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/997b752d-4e35-4780-a978-35cda6d832fa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "997b752d-4e35-4780-a978-35cda6d832fa" (UID: "997b752d-4e35-4780-a978-35cda6d832fa"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:44:27 crc kubenswrapper[4791]: I1208 21:44:27.938003 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ba96aae-1964-4b6a-b1dd-204997df9230-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7ba96aae-1964-4b6a-b1dd-204997df9230" (UID: "7ba96aae-1964-4b6a-b1dd-204997df9230"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:44:27 crc kubenswrapper[4791]: I1208 21:44:27.991250 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ba96aae-1964-4b6a-b1dd-204997df9230-config-data" (OuterVolumeSpecName: "config-data") pod "7ba96aae-1964-4b6a-b1dd-204997df9230" (UID: "7ba96aae-1964-4b6a-b1dd-204997df9230"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:44:27 crc kubenswrapper[4791]: I1208 21:44:27.994347 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/997b752d-4e35-4780-a978-35cda6d832fa-config-data" (OuterVolumeSpecName: "config-data") pod "997b752d-4e35-4780-a978-35cda6d832fa" (UID: "997b752d-4e35-4780-a978-35cda6d832fa"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:44:28 crc kubenswrapper[4791]: I1208 21:44:28.009871 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-947d869cc-4cx89" event={"ID":"7ba96aae-1964-4b6a-b1dd-204997df9230","Type":"ContainerDied","Data":"8171d7437b85e90c5a4954d650bde91ccb4af1c484f82163a8c2fd162b789ed0"} Dec 08 21:44:28 crc kubenswrapper[4791]: I1208 21:44:28.009970 4791 scope.go:117] "RemoveContainer" containerID="f119605dbc8ad2dd193cbfa63933c8b916190d7ec566c77823b6d086e0a299fa" Dec 08 21:44:28 crc kubenswrapper[4791]: I1208 21:44:28.010056 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-947d869cc-4cx89" Dec 08 21:44:28 crc kubenswrapper[4791]: I1208 21:44:28.012162 4791 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/997b752d-4e35-4780-a978-35cda6d832fa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:28 crc kubenswrapper[4791]: I1208 21:44:28.012191 4791 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ba96aae-1964-4b6a-b1dd-204997df9230-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:28 crc kubenswrapper[4791]: I1208 21:44:28.012200 4791 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/997b752d-4e35-4780-a978-35cda6d832fa-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:28 crc kubenswrapper[4791]: I1208 21:44:28.012210 4791 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ba96aae-1964-4b6a-b1dd-204997df9230-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:28 crc kubenswrapper[4791]: I1208 21:44:28.013003 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-759ddfcf78-9k9nf" event={"ID":"997b752d-4e35-4780-a978-35cda6d832fa","Type":"ContainerDied","Data":"b772c57c3c9de5b2a8e092f215b004092e57845b485071dea7200fce274574a6"} Dec 08 21:44:28 crc kubenswrapper[4791]: I1208 21:44:28.013115 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-cfnapi-759ddfcf78-9k9nf" Dec 08 21:44:28 crc kubenswrapper[4791]: I1208 21:44:28.053694 4791 scope.go:117] "RemoveContainer" containerID="9374abbf86dacdc6560893cf2d338b2b11dbfa3ec5ac9f799643dd2e1c84309d" Dec 08 21:44:28 crc kubenswrapper[4791]: I1208 21:44:28.075786 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-cfnapi-759ddfcf78-9k9nf"] Dec 08 21:44:28 crc kubenswrapper[4791]: I1208 21:44:28.095729 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-cfnapi-759ddfcf78-9k9nf"] Dec 08 21:44:28 crc kubenswrapper[4791]: I1208 21:44:28.124825 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-api-947d869cc-4cx89"] Dec 08 21:44:28 crc kubenswrapper[4791]: I1208 21:44:28.132332 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-api-947d869cc-4cx89"] Dec 08 21:44:29 crc kubenswrapper[4791]: I1208 21:44:29.033132 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-bm9lh" event={"ID":"e02fffe8-6208-48f0-ba89-6d54f07f5ae4","Type":"ContainerStarted","Data":"55fad5012a7e2bb2fe91c5086e8fd40d5b5fddcc9b496c17fdb3a2c863152c60"} Dec 08 21:44:29 crc kubenswrapper[4791]: I1208 21:44:29.062419 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-bm9lh" podStartSLOduration=10.849216736 podStartE2EDuration="23.062397317s" podCreationTimestamp="2025-12-08 21:44:06 +0000 UTC" firstStartedPulling="2025-12-08 21:44:15.318687197 +0000 UTC m=+1532.017445542" lastFinishedPulling="2025-12-08 21:44:27.531867778 +0000 UTC m=+1544.230626123" observedRunningTime="2025-12-08 21:44:29.045922632 +0000 UTC m=+1545.744680977" watchObservedRunningTime="2025-12-08 21:44:29.062397317 +0000 UTC m=+1545.761155662" Dec 08 21:44:29 crc kubenswrapper[4791]: I1208 21:44:29.609582 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ba96aae-1964-4b6a-b1dd-204997df9230" path="/var/lib/kubelet/pods/7ba96aae-1964-4b6a-b1dd-204997df9230/volumes" Dec 08 21:44:29 crc kubenswrapper[4791]: I1208 21:44:29.610411 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="997b752d-4e35-4780-a978-35cda6d832fa" path="/var/lib/kubelet/pods/997b752d-4e35-4780-a978-35cda6d832fa/volumes" Dec 08 21:44:30 crc kubenswrapper[4791]: I1208 21:44:30.520000 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 08 21:44:30 crc kubenswrapper[4791]: I1208 21:44:30.520575 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="477be1bd-91b3-46cb-ac8d-8e1bd8242066" containerName="glance-log" containerID="cri-o://b76ecd3a1ebf375f740d54b66e488f64605f0612f4fde44c2a1ad904ceb0ff88" gracePeriod=30 Dec 08 21:44:30 crc kubenswrapper[4791]: I1208 21:44:30.520900 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="477be1bd-91b3-46cb-ac8d-8e1bd8242066" containerName="glance-httpd" containerID="cri-o://f05edc8ce19b5afe71859980edb82c0121f1e4915bad8def8ab725c39e8e82fa" gracePeriod=30 Dec 08 21:44:30 crc kubenswrapper[4791]: I1208 21:44:30.860621 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-engine-d44cc5586-cqs7v" Dec 08 21:44:30 crc kubenswrapper[4791]: I1208 21:44:30.916435 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/heat-engine-6fdc69876f-c67rs"] Dec 08 21:44:30 crc kubenswrapper[4791]: I1208 21:44:30.916978 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/heat-engine-6fdc69876f-c67rs" podUID="30ff68d8-19dd-41a7-b38d-9f27571b27bc" containerName="heat-engine" containerID="cri-o://45be5921a4a0e581b14dc5be8e3c3c955824db4bc5aff6990bd2e34e3810e37d" gracePeriod=60 Dec 08 21:44:31 crc kubenswrapper[4791]: I1208 21:44:31.054495 4791 generic.go:334] "Generic (PLEG): container finished" podID="477be1bd-91b3-46cb-ac8d-8e1bd8242066" containerID="b76ecd3a1ebf375f740d54b66e488f64605f0612f4fde44c2a1ad904ceb0ff88" exitCode=143 Dec 08 21:44:31 crc kubenswrapper[4791]: I1208 21:44:31.054931 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"477be1bd-91b3-46cb-ac8d-8e1bd8242066","Type":"ContainerDied","Data":"b76ecd3a1ebf375f740d54b66e488f64605f0612f4fde44c2a1ad904ceb0ff88"} Dec 08 21:44:32 crc kubenswrapper[4791]: I1208 21:44:32.074121 4791 generic.go:334] "Generic (PLEG): container finished" podID="bcd8d669-4a40-401d-af99-651b840fb48b" containerID="6aaa3b083eed387233afd8e27c4bf5a9ae6d9cb075ffa437c67d681e065ad29e" exitCode=1 Dec 08 21:44:32 crc kubenswrapper[4791]: I1208 21:44:32.074186 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" event={"ID":"bcd8d669-4a40-401d-af99-651b840fb48b","Type":"ContainerDied","Data":"6aaa3b083eed387233afd8e27c4bf5a9ae6d9cb075ffa437c67d681e065ad29e"} Dec 08 21:44:32 crc kubenswrapper[4791]: I1208 21:44:32.074242 4791 scope.go:117] "RemoveContainer" containerID="fa46debf45bbc591d3f3d2ff279b0f4ae741603ae5627a9244c1052ab274ef8a" Dec 08 21:44:32 crc kubenswrapper[4791]: I1208 21:44:32.080642 4791 scope.go:117] "RemoveContainer" containerID="6aaa3b083eed387233afd8e27c4bf5a9ae6d9cb075ffa437c67d681e065ad29e" Dec 08 21:44:32 crc kubenswrapper[4791]: E1208 21:44:32.081590 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 21:44:32 crc kubenswrapper[4791]: I1208 21:44:32.270033 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 08 21:44:32 crc kubenswrapper[4791]: I1208 21:44:32.270619 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="36cb82aa-4e45-4bb1-9192-de3caeb2b9a5" containerName="glance-log" containerID="cri-o://f66e81e77504abfaa628991fd673001b373e18a3f5362cfe5dafd86976494760" gracePeriod=30 Dec 08 21:44:32 crc kubenswrapper[4791]: I1208 21:44:32.270812 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="36cb82aa-4e45-4bb1-9192-de3caeb2b9a5" containerName="glance-httpd" containerID="cri-o://3e241ea4a4640b677ffaf1eabb89c7d4e67a0afd46c80c93461f1faabc36f111" gracePeriod=30 Dec 08 21:44:33 crc kubenswrapper[4791]: I1208 21:44:33.085362 4791 generic.go:334] "Generic (PLEG): container finished" podID="36cb82aa-4e45-4bb1-9192-de3caeb2b9a5" 
containerID="f66e81e77504abfaa628991fd673001b373e18a3f5362cfe5dafd86976494760" exitCode=143 Dec 08 21:44:33 crc kubenswrapper[4791]: I1208 21:44:33.085426 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5","Type":"ContainerDied","Data":"f66e81e77504abfaa628991fd673001b373e18a3f5362cfe5dafd86976494760"} Dec 08 21:44:33 crc kubenswrapper[4791]: E1208 21:44:33.378546 4791 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="45be5921a4a0e581b14dc5be8e3c3c955824db4bc5aff6990bd2e34e3810e37d" cmd=["/usr/bin/pgrep","-r","DRST","heat-engine"] Dec 08 21:44:33 crc kubenswrapper[4791]: E1208 21:44:33.380540 4791 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="45be5921a4a0e581b14dc5be8e3c3c955824db4bc5aff6990bd2e34e3810e37d" cmd=["/usr/bin/pgrep","-r","DRST","heat-engine"] Dec 08 21:44:33 crc kubenswrapper[4791]: E1208 21:44:33.386045 4791 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="45be5921a4a0e581b14dc5be8e3c3c955824db4bc5aff6990bd2e34e3810e37d" cmd=["/usr/bin/pgrep","-r","DRST","heat-engine"] Dec 08 21:44:33 crc kubenswrapper[4791]: E1208 21:44:33.387141 4791 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/heat-engine-6fdc69876f-c67rs" podUID="30ff68d8-19dd-41a7-b38d-9f27571b27bc" containerName="heat-engine" Dec 08 21:44:34 crc kubenswrapper[4791]: I1208 21:44:34.132292 4791 generic.go:334] "Generic (PLEG): container finished" podID="477be1bd-91b3-46cb-ac8d-8e1bd8242066" containerID="f05edc8ce19b5afe71859980edb82c0121f1e4915bad8def8ab725c39e8e82fa" exitCode=0 Dec 08 21:44:34 crc kubenswrapper[4791]: I1208 21:44:34.132507 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"477be1bd-91b3-46cb-ac8d-8e1bd8242066","Type":"ContainerDied","Data":"f05edc8ce19b5afe71859980edb82c0121f1e4915bad8def8ab725c39e8e82fa"} Dec 08 21:44:34 crc kubenswrapper[4791]: I1208 21:44:34.353881 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 08 21:44:34 crc kubenswrapper[4791]: I1208 21:44:34.498763 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/477be1bd-91b3-46cb-ac8d-8e1bd8242066-logs\") pod \"477be1bd-91b3-46cb-ac8d-8e1bd8242066\" (UID: \"477be1bd-91b3-46cb-ac8d-8e1bd8242066\") " Dec 08 21:44:34 crc kubenswrapper[4791]: I1208 21:44:34.498884 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/477be1bd-91b3-46cb-ac8d-8e1bd8242066-httpd-run\") pod \"477be1bd-91b3-46cb-ac8d-8e1bd8242066\" (UID: \"477be1bd-91b3-46cb-ac8d-8e1bd8242066\") " Dec 08 21:44:34 crc kubenswrapper[4791]: I1208 21:44:34.498927 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/477be1bd-91b3-46cb-ac8d-8e1bd8242066-public-tls-certs\") pod \"477be1bd-91b3-46cb-ac8d-8e1bd8242066\" (UID: \"477be1bd-91b3-46cb-ac8d-8e1bd8242066\") " Dec 08 21:44:34 crc kubenswrapper[4791]: I1208 21:44:34.498969 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/477be1bd-91b3-46cb-ac8d-8e1bd8242066-scripts\") pod \"477be1bd-91b3-46cb-ac8d-8e1bd8242066\" (UID: \"477be1bd-91b3-46cb-ac8d-8e1bd8242066\") " Dec 08 21:44:34 crc kubenswrapper[4791]: I1208 21:44:34.499006 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/477be1bd-91b3-46cb-ac8d-8e1bd8242066-combined-ca-bundle\") pod \"477be1bd-91b3-46cb-ac8d-8e1bd8242066\" (UID: \"477be1bd-91b3-46cb-ac8d-8e1bd8242066\") " Dec 08 21:44:34 crc kubenswrapper[4791]: I1208 21:44:34.499425 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/477be1bd-91b3-46cb-ac8d-8e1bd8242066-logs" (OuterVolumeSpecName: "logs") pod "477be1bd-91b3-46cb-ac8d-8e1bd8242066" (UID: "477be1bd-91b3-46cb-ac8d-8e1bd8242066"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:44:34 crc kubenswrapper[4791]: I1208 21:44:34.503804 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fd7473b6-a721-4577-93f7-bea6228887e6\") pod \"477be1bd-91b3-46cb-ac8d-8e1bd8242066\" (UID: \"477be1bd-91b3-46cb-ac8d-8e1bd8242066\") " Dec 08 21:44:34 crc kubenswrapper[4791]: I1208 21:44:34.503869 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/477be1bd-91b3-46cb-ac8d-8e1bd8242066-config-data\") pod \"477be1bd-91b3-46cb-ac8d-8e1bd8242066\" (UID: \"477be1bd-91b3-46cb-ac8d-8e1bd8242066\") " Dec 08 21:44:34 crc kubenswrapper[4791]: I1208 21:44:34.503996 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pkqxm\" (UniqueName: \"kubernetes.io/projected/477be1bd-91b3-46cb-ac8d-8e1bd8242066-kube-api-access-pkqxm\") pod \"477be1bd-91b3-46cb-ac8d-8e1bd8242066\" (UID: \"477be1bd-91b3-46cb-ac8d-8e1bd8242066\") " Dec 08 21:44:34 crc kubenswrapper[4791]: I1208 21:44:34.504808 4791 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/477be1bd-91b3-46cb-ac8d-8e1bd8242066-logs\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:34 crc kubenswrapper[4791]: I1208 21:44:34.510183 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/477be1bd-91b3-46cb-ac8d-8e1bd8242066-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "477be1bd-91b3-46cb-ac8d-8e1bd8242066" (UID: "477be1bd-91b3-46cb-ac8d-8e1bd8242066"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:44:34 crc kubenswrapper[4791]: I1208 21:44:34.530832 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/477be1bd-91b3-46cb-ac8d-8e1bd8242066-scripts" (OuterVolumeSpecName: "scripts") pod "477be1bd-91b3-46cb-ac8d-8e1bd8242066" (UID: "477be1bd-91b3-46cb-ac8d-8e1bd8242066"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:44:34 crc kubenswrapper[4791]: I1208 21:44:34.531005 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/477be1bd-91b3-46cb-ac8d-8e1bd8242066-kube-api-access-pkqxm" (OuterVolumeSpecName: "kube-api-access-pkqxm") pod "477be1bd-91b3-46cb-ac8d-8e1bd8242066" (UID: "477be1bd-91b3-46cb-ac8d-8e1bd8242066"). InnerVolumeSpecName "kube-api-access-pkqxm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:44:34 crc kubenswrapper[4791]: I1208 21:44:34.562030 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fd7473b6-a721-4577-93f7-bea6228887e6" (OuterVolumeSpecName: "glance") pod "477be1bd-91b3-46cb-ac8d-8e1bd8242066" (UID: "477be1bd-91b3-46cb-ac8d-8e1bd8242066"). InnerVolumeSpecName "pvc-fd7473b6-a721-4577-93f7-bea6228887e6". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 08 21:44:34 crc kubenswrapper[4791]: I1208 21:44:34.602384 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/477be1bd-91b3-46cb-ac8d-8e1bd8242066-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "477be1bd-91b3-46cb-ac8d-8e1bd8242066" (UID: "477be1bd-91b3-46cb-ac8d-8e1bd8242066"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:44:34 crc kubenswrapper[4791]: I1208 21:44:34.602683 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/477be1bd-91b3-46cb-ac8d-8e1bd8242066-config-data" (OuterVolumeSpecName: "config-data") pod "477be1bd-91b3-46cb-ac8d-8e1bd8242066" (UID: "477be1bd-91b3-46cb-ac8d-8e1bd8242066"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:44:34 crc kubenswrapper[4791]: I1208 21:44:34.606677 4791 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/477be1bd-91b3-46cb-ac8d-8e1bd8242066-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:34 crc kubenswrapper[4791]: I1208 21:44:34.606722 4791 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/477be1bd-91b3-46cb-ac8d-8e1bd8242066-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:34 crc kubenswrapper[4791]: I1208 21:44:34.606750 4791 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-fd7473b6-a721-4577-93f7-bea6228887e6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fd7473b6-a721-4577-93f7-bea6228887e6\") on node \"crc\" " Dec 08 21:44:34 crc kubenswrapper[4791]: I1208 21:44:34.606762 4791 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/477be1bd-91b3-46cb-ac8d-8e1bd8242066-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:34 crc kubenswrapper[4791]: I1208 21:44:34.606773 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pkqxm\" (UniqueName: \"kubernetes.io/projected/477be1bd-91b3-46cb-ac8d-8e1bd8242066-kube-api-access-pkqxm\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:34 crc kubenswrapper[4791]: I1208 21:44:34.606782 4791 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/477be1bd-91b3-46cb-ac8d-8e1bd8242066-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:34 crc kubenswrapper[4791]: I1208 21:44:34.607688 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/477be1bd-91b3-46cb-ac8d-8e1bd8242066-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "477be1bd-91b3-46cb-ac8d-8e1bd8242066" (UID: "477be1bd-91b3-46cb-ac8d-8e1bd8242066"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:44:34 crc kubenswrapper[4791]: I1208 21:44:34.661134 4791 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Dec 08 21:44:34 crc kubenswrapper[4791]: I1208 21:44:34.661321 4791 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-fd7473b6-a721-4577-93f7-bea6228887e6" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fd7473b6-a721-4577-93f7-bea6228887e6") on node "crc" Dec 08 21:44:34 crc kubenswrapper[4791]: I1208 21:44:34.709609 4791 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/477be1bd-91b3-46cb-ac8d-8e1bd8242066-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:34 crc kubenswrapper[4791]: I1208 21:44:34.709654 4791 reconciler_common.go:293] "Volume detached for volume \"pvc-fd7473b6-a721-4577-93f7-bea6228887e6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fd7473b6-a721-4577-93f7-bea6228887e6\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.146273 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"477be1bd-91b3-46cb-ac8d-8e1bd8242066","Type":"ContainerDied","Data":"1ba9a8d18a906fa1bb1117f2d09f67605b1719180197ad411a7ec95067441b16"} Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.146566 4791 scope.go:117] "RemoveContainer" containerID="f05edc8ce19b5afe71859980edb82c0121f1e4915bad8def8ab725c39e8e82fa" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.146676 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.188778 4791 scope.go:117] "RemoveContainer" containerID="b76ecd3a1ebf375f740d54b66e488f64605f0612f4fde44c2a1ad904ceb0ff88" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.204784 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.225334 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.255083 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 08 21:44:35 crc kubenswrapper[4791]: E1208 21:44:35.259365 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="477be1bd-91b3-46cb-ac8d-8e1bd8242066" containerName="glance-log" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.259399 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="477be1bd-91b3-46cb-ac8d-8e1bd8242066" containerName="glance-log" Dec 08 21:44:35 crc kubenswrapper[4791]: E1208 21:44:35.259429 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d81e17e-0d6f-461b-8bd9-8d277de96edd" containerName="init" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.259439 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d81e17e-0d6f-461b-8bd9-8d277de96edd" containerName="init" Dec 08 21:44:35 crc kubenswrapper[4791]: E1208 21:44:35.259496 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="997b752d-4e35-4780-a978-35cda6d832fa" containerName="heat-cfnapi" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.259509 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="997b752d-4e35-4780-a978-35cda6d832fa" containerName="heat-cfnapi" Dec 08 21:44:35 crc kubenswrapper[4791]: E1208 21:44:35.259528 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="477be1bd-91b3-46cb-ac8d-8e1bd8242066" 
containerName="glance-httpd" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.259536 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="477be1bd-91b3-46cb-ac8d-8e1bd8242066" containerName="glance-httpd" Dec 08 21:44:35 crc kubenswrapper[4791]: E1208 21:44:35.259557 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ba96aae-1964-4b6a-b1dd-204997df9230" containerName="heat-api" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.259568 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ba96aae-1964-4b6a-b1dd-204997df9230" containerName="heat-api" Dec 08 21:44:35 crc kubenswrapper[4791]: E1208 21:44:35.259583 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="997b752d-4e35-4780-a978-35cda6d832fa" containerName="heat-cfnapi" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.259598 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="997b752d-4e35-4780-a978-35cda6d832fa" containerName="heat-cfnapi" Dec 08 21:44:35 crc kubenswrapper[4791]: E1208 21:44:35.259628 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ba96aae-1964-4b6a-b1dd-204997df9230" containerName="heat-api" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.259635 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ba96aae-1964-4b6a-b1dd-204997df9230" containerName="heat-api" Dec 08 21:44:35 crc kubenswrapper[4791]: E1208 21:44:35.259672 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4ae5100-9cc7-462c-8424-d8837d636fa3" containerName="heat-cfnapi" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.259680 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4ae5100-9cc7-462c-8424-d8837d636fa3" containerName="heat-cfnapi" Dec 08 21:44:35 crc kubenswrapper[4791]: E1208 21:44:35.259698 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d81e17e-0d6f-461b-8bd9-8d277de96edd" containerName="dnsmasq-dns" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.259724 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d81e17e-0d6f-461b-8bd9-8d277de96edd" containerName="dnsmasq-dns" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.260386 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ba96aae-1964-4b6a-b1dd-204997df9230" containerName="heat-api" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.260423 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="477be1bd-91b3-46cb-ac8d-8e1bd8242066" containerName="glance-httpd" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.260448 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="477be1bd-91b3-46cb-ac8d-8e1bd8242066" containerName="glance-log" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.260475 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="997b752d-4e35-4780-a978-35cda6d832fa" containerName="heat-cfnapi" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.260501 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="997b752d-4e35-4780-a978-35cda6d832fa" containerName="heat-cfnapi" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.260543 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4ae5100-9cc7-462c-8424-d8837d636fa3" containerName="heat-cfnapi" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.260556 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d81e17e-0d6f-461b-8bd9-8d277de96edd" containerName="dnsmasq-dns" Dec 08 
21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.261620 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ba96aae-1964-4b6a-b1dd-204997df9230" containerName="heat-api" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.264868 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.264951 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.274516 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.274659 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.277847 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.278112 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.425466 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j86fm\" (UniqueName: \"kubernetes.io/projected/2afefbde-0eb6-4887-94b7-c018e79f1ddb-kube-api-access-j86fm\") pod \"glance-default-external-api-0\" (UID: \"2afefbde-0eb6-4887-94b7-c018e79f1ddb\") " pod="openstack/glance-default-external-api-0" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.425851 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2afefbde-0eb6-4887-94b7-c018e79f1ddb-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"2afefbde-0eb6-4887-94b7-c018e79f1ddb\") " pod="openstack/glance-default-external-api-0" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.425998 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2afefbde-0eb6-4887-94b7-c018e79f1ddb-scripts\") pod \"glance-default-external-api-0\" (UID: \"2afefbde-0eb6-4887-94b7-c018e79f1ddb\") " pod="openstack/glance-default-external-api-0" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.426108 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2afefbde-0eb6-4887-94b7-c018e79f1ddb-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"2afefbde-0eb6-4887-94b7-c018e79f1ddb\") " pod="openstack/glance-default-external-api-0" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.426268 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-fd7473b6-a721-4577-93f7-bea6228887e6\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fd7473b6-a721-4577-93f7-bea6228887e6\") pod \"glance-default-external-api-0\" (UID: \"2afefbde-0eb6-4887-94b7-c018e79f1ddb\") " pod="openstack/glance-default-external-api-0" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.426305 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2afefbde-0eb6-4887-94b7-c018e79f1ddb-config-data\") pod \"glance-default-external-api-0\" (UID: \"2afefbde-0eb6-4887-94b7-c018e79f1ddb\") " pod="openstack/glance-default-external-api-0" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.426349 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2afefbde-0eb6-4887-94b7-c018e79f1ddb-logs\") pod \"glance-default-external-api-0\" (UID: \"2afefbde-0eb6-4887-94b7-c018e79f1ddb\") " pod="openstack/glance-default-external-api-0" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.426456 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2afefbde-0eb6-4887-94b7-c018e79f1ddb-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"2afefbde-0eb6-4887-94b7-c018e79f1ddb\") " pod="openstack/glance-default-external-api-0" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.529347 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2afefbde-0eb6-4887-94b7-c018e79f1ddb-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"2afefbde-0eb6-4887-94b7-c018e79f1ddb\") " pod="openstack/glance-default-external-api-0" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.529536 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j86fm\" (UniqueName: \"kubernetes.io/projected/2afefbde-0eb6-4887-94b7-c018e79f1ddb-kube-api-access-j86fm\") pod \"glance-default-external-api-0\" (UID: \"2afefbde-0eb6-4887-94b7-c018e79f1ddb\") " pod="openstack/glance-default-external-api-0" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.529598 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2afefbde-0eb6-4887-94b7-c018e79f1ddb-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"2afefbde-0eb6-4887-94b7-c018e79f1ddb\") " pod="openstack/glance-default-external-api-0" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.529643 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2afefbde-0eb6-4887-94b7-c018e79f1ddb-scripts\") pod \"glance-default-external-api-0\" (UID: \"2afefbde-0eb6-4887-94b7-c018e79f1ddb\") " pod="openstack/glance-default-external-api-0" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.530323 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2afefbde-0eb6-4887-94b7-c018e79f1ddb-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"2afefbde-0eb6-4887-94b7-c018e79f1ddb\") " pod="openstack/glance-default-external-api-0" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.532019 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/2afefbde-0eb6-4887-94b7-c018e79f1ddb-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"2afefbde-0eb6-4887-94b7-c018e79f1ddb\") " pod="openstack/glance-default-external-api-0" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.532214 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-fd7473b6-a721-4577-93f7-bea6228887e6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fd7473b6-a721-4577-93f7-bea6228887e6\") pod \"glance-default-external-api-0\" (UID: \"2afefbde-0eb6-4887-94b7-c018e79f1ddb\") " pod="openstack/glance-default-external-api-0" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.532243 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2afefbde-0eb6-4887-94b7-c018e79f1ddb-config-data\") pod \"glance-default-external-api-0\" (UID: \"2afefbde-0eb6-4887-94b7-c018e79f1ddb\") " pod="openstack/glance-default-external-api-0" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.532319 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2afefbde-0eb6-4887-94b7-c018e79f1ddb-logs\") pod \"glance-default-external-api-0\" (UID: \"2afefbde-0eb6-4887-94b7-c018e79f1ddb\") " pod="openstack/glance-default-external-api-0" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.535775 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2afefbde-0eb6-4887-94b7-c018e79f1ddb-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"2afefbde-0eb6-4887-94b7-c018e79f1ddb\") " pod="openstack/glance-default-external-api-0" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.537400 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2afefbde-0eb6-4887-94b7-c018e79f1ddb-scripts\") pod \"glance-default-external-api-0\" (UID: \"2afefbde-0eb6-4887-94b7-c018e79f1ddb\") " pod="openstack/glance-default-external-api-0" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.537585 4791 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.537635 4791 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-fd7473b6-a721-4577-93f7-bea6228887e6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fd7473b6-a721-4577-93f7-bea6228887e6\") pod \"glance-default-external-api-0\" (UID: \"2afefbde-0eb6-4887-94b7-c018e79f1ddb\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/2b1f4bc6b3ef864632668d1177fe0017bb99c34b82e704a930801276839aa0f1/globalmount\"" pod="openstack/glance-default-external-api-0" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.538652 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2afefbde-0eb6-4887-94b7-c018e79f1ddb-logs\") pod \"glance-default-external-api-0\" (UID: \"2afefbde-0eb6-4887-94b7-c018e79f1ddb\") " pod="openstack/glance-default-external-api-0" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.544645 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2afefbde-0eb6-4887-94b7-c018e79f1ddb-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"2afefbde-0eb6-4887-94b7-c018e79f1ddb\") " pod="openstack/glance-default-external-api-0" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.550493 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j86fm\" (UniqueName: \"kubernetes.io/projected/2afefbde-0eb6-4887-94b7-c018e79f1ddb-kube-api-access-j86fm\") pod \"glance-default-external-api-0\" (UID: \"2afefbde-0eb6-4887-94b7-c018e79f1ddb\") " pod="openstack/glance-default-external-api-0" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.550955 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2afefbde-0eb6-4887-94b7-c018e79f1ddb-config-data\") pod \"glance-default-external-api-0\" (UID: \"2afefbde-0eb6-4887-94b7-c018e79f1ddb\") " pod="openstack/glance-default-external-api-0" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.632439 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="477be1bd-91b3-46cb-ac8d-8e1bd8242066" path="/var/lib/kubelet/pods/477be1bd-91b3-46cb-ac8d-8e1bd8242066/volumes" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.866699 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-fd7473b6-a721-4577-93f7-bea6228887e6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fd7473b6-a721-4577-93f7-bea6228887e6\") pod \"glance-default-external-api-0\" (UID: \"2afefbde-0eb6-4887-94b7-c018e79f1ddb\") " pod="openstack/glance-default-external-api-0" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.884310 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.885193 4791 scope.go:117] "RemoveContainer" containerID="6aaa3b083eed387233afd8e27c4bf5a9ae6d9cb075ffa437c67d681e065ad29e" Dec 08 21:44:35 crc kubenswrapper[4791]: E1208 21:44:35.885448 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager 
pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 21:44:35 crc kubenswrapper[4791]: I1208 21:44:35.907308 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.198059 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.208750 4791 generic.go:334] "Generic (PLEG): container finished" podID="36cb82aa-4e45-4bb1-9192-de3caeb2b9a5" containerID="3e241ea4a4640b677ffaf1eabb89c7d4e67a0afd46c80c93461f1faabc36f111" exitCode=0 Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.208805 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5","Type":"ContainerDied","Data":"3e241ea4a4640b677ffaf1eabb89c7d4e67a0afd46c80c93461f1faabc36f111"} Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.208839 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5","Type":"ContainerDied","Data":"d1efd7b7dac755b189e044b492a51022ae88d1005d04133b560c91488aa64709"} Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.208838 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.208864 4791 scope.go:117] "RemoveContainer" containerID="3e241ea4a4640b677ffaf1eabb89c7d4e67a0afd46c80c93461f1faabc36f111" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.276950 4791 scope.go:117] "RemoveContainer" containerID="f66e81e77504abfaa628991fd673001b373e18a3f5362cfe5dafd86976494760" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.301292 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/36cb82aa-4e45-4bb1-9192-de3caeb2b9a5-internal-tls-certs\") pod \"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5\" (UID: \"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5\") " Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.301931 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3530eacc-1908-4492-a59f-15d59644c0dc\") pod \"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5\" (UID: \"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5\") " Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.302035 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36cb82aa-4e45-4bb1-9192-de3caeb2b9a5-config-data\") pod \"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5\" (UID: \"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5\") " Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.302156 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/36cb82aa-4e45-4bb1-9192-de3caeb2b9a5-logs\") pod \"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5\" (UID: \"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5\") " Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.302265 4791 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/36cb82aa-4e45-4bb1-9192-de3caeb2b9a5-httpd-run\") pod \"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5\" (UID: \"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5\") " Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.302293 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36cb82aa-4e45-4bb1-9192-de3caeb2b9a5-scripts\") pod \"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5\" (UID: \"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5\") " Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.302436 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zf4nt\" (UniqueName: \"kubernetes.io/projected/36cb82aa-4e45-4bb1-9192-de3caeb2b9a5-kube-api-access-zf4nt\") pod \"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5\" (UID: \"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5\") " Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.302503 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36cb82aa-4e45-4bb1-9192-de3caeb2b9a5-combined-ca-bundle\") pod \"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5\" (UID: \"36cb82aa-4e45-4bb1-9192-de3caeb2b9a5\") " Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.304994 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/36cb82aa-4e45-4bb1-9192-de3caeb2b9a5-logs" (OuterVolumeSpecName: "logs") pod "36cb82aa-4e45-4bb1-9192-de3caeb2b9a5" (UID: "36cb82aa-4e45-4bb1-9192-de3caeb2b9a5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.305041 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/36cb82aa-4e45-4bb1-9192-de3caeb2b9a5-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "36cb82aa-4e45-4bb1-9192-de3caeb2b9a5" (UID: "36cb82aa-4e45-4bb1-9192-de3caeb2b9a5"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.305826 4791 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/36cb82aa-4e45-4bb1-9192-de3caeb2b9a5-logs\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.305845 4791 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/36cb82aa-4e45-4bb1-9192-de3caeb2b9a5-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.306516 4791 scope.go:117] "RemoveContainer" containerID="3e241ea4a4640b677ffaf1eabb89c7d4e67a0afd46c80c93461f1faabc36f111" Dec 08 21:44:36 crc kubenswrapper[4791]: E1208 21:44:36.308119 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3e241ea4a4640b677ffaf1eabb89c7d4e67a0afd46c80c93461f1faabc36f111\": container with ID starting with 3e241ea4a4640b677ffaf1eabb89c7d4e67a0afd46c80c93461f1faabc36f111 not found: ID does not exist" containerID="3e241ea4a4640b677ffaf1eabb89c7d4e67a0afd46c80c93461f1faabc36f111" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.308178 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e241ea4a4640b677ffaf1eabb89c7d4e67a0afd46c80c93461f1faabc36f111"} err="failed to get container status \"3e241ea4a4640b677ffaf1eabb89c7d4e67a0afd46c80c93461f1faabc36f111\": rpc error: code = NotFound desc = could not find container \"3e241ea4a4640b677ffaf1eabb89c7d4e67a0afd46c80c93461f1faabc36f111\": container with ID starting with 3e241ea4a4640b677ffaf1eabb89c7d4e67a0afd46c80c93461f1faabc36f111 not found: ID does not exist" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.308209 4791 scope.go:117] "RemoveContainer" containerID="f66e81e77504abfaa628991fd673001b373e18a3f5362cfe5dafd86976494760" Dec 08 21:44:36 crc kubenswrapper[4791]: E1208 21:44:36.309507 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f66e81e77504abfaa628991fd673001b373e18a3f5362cfe5dafd86976494760\": container with ID starting with f66e81e77504abfaa628991fd673001b373e18a3f5362cfe5dafd86976494760 not found: ID does not exist" containerID="f66e81e77504abfaa628991fd673001b373e18a3f5362cfe5dafd86976494760" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.309541 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f66e81e77504abfaa628991fd673001b373e18a3f5362cfe5dafd86976494760"} err="failed to get container status \"f66e81e77504abfaa628991fd673001b373e18a3f5362cfe5dafd86976494760\": rpc error: code = NotFound desc = could not find container \"f66e81e77504abfaa628991fd673001b373e18a3f5362cfe5dafd86976494760\": container with ID starting with f66e81e77504abfaa628991fd673001b373e18a3f5362cfe5dafd86976494760 not found: ID does not exist" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.319171 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36cb82aa-4e45-4bb1-9192-de3caeb2b9a5-scripts" (OuterVolumeSpecName: "scripts") pod "36cb82aa-4e45-4bb1-9192-de3caeb2b9a5" (UID: "36cb82aa-4e45-4bb1-9192-de3caeb2b9a5"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.328003 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36cb82aa-4e45-4bb1-9192-de3caeb2b9a5-kube-api-access-zf4nt" (OuterVolumeSpecName: "kube-api-access-zf4nt") pod "36cb82aa-4e45-4bb1-9192-de3caeb2b9a5" (UID: "36cb82aa-4e45-4bb1-9192-de3caeb2b9a5"). InnerVolumeSpecName "kube-api-access-zf4nt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.354327 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36cb82aa-4e45-4bb1-9192-de3caeb2b9a5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "36cb82aa-4e45-4bb1-9192-de3caeb2b9a5" (UID: "36cb82aa-4e45-4bb1-9192-de3caeb2b9a5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.381502 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3530eacc-1908-4492-a59f-15d59644c0dc" (OuterVolumeSpecName: "glance") pod "36cb82aa-4e45-4bb1-9192-de3caeb2b9a5" (UID: "36cb82aa-4e45-4bb1-9192-de3caeb2b9a5"). InnerVolumeSpecName "pvc-3530eacc-1908-4492-a59f-15d59644c0dc". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.385540 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36cb82aa-4e45-4bb1-9192-de3caeb2b9a5-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "36cb82aa-4e45-4bb1-9192-de3caeb2b9a5" (UID: "36cb82aa-4e45-4bb1-9192-de3caeb2b9a5"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.407873 4791 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36cb82aa-4e45-4bb1-9192-de3caeb2b9a5-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.407909 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zf4nt\" (UniqueName: \"kubernetes.io/projected/36cb82aa-4e45-4bb1-9192-de3caeb2b9a5-kube-api-access-zf4nt\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.407927 4791 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36cb82aa-4e45-4bb1-9192-de3caeb2b9a5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.407940 4791 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/36cb82aa-4e45-4bb1-9192-de3caeb2b9a5-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.407974 4791 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-3530eacc-1908-4492-a59f-15d59644c0dc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3530eacc-1908-4492-a59f-15d59644c0dc\") on node \"crc\" " Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.408596 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36cb82aa-4e45-4bb1-9192-de3caeb2b9a5-config-data" (OuterVolumeSpecName: "config-data") pod "36cb82aa-4e45-4bb1-9192-de3caeb2b9a5" (UID: "36cb82aa-4e45-4bb1-9192-de3caeb2b9a5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.457543 4791 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.457788 4791 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-3530eacc-1908-4492-a59f-15d59644c0dc" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3530eacc-1908-4492-a59f-15d59644c0dc") on node "crc" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.510598 4791 reconciler_common.go:293] "Volume detached for volume \"pvc-3530eacc-1908-4492-a59f-15d59644c0dc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3530eacc-1908-4492-a59f-15d59644c0dc\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.510916 4791 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36cb82aa-4e45-4bb1-9192-de3caeb2b9a5-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.638625 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.657885 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.684204 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 08 21:44:36 crc kubenswrapper[4791]: E1208 21:44:36.685144 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36cb82aa-4e45-4bb1-9192-de3caeb2b9a5" containerName="glance-httpd" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.685171 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="36cb82aa-4e45-4bb1-9192-de3caeb2b9a5" containerName="glance-httpd" Dec 08 21:44:36 crc kubenswrapper[4791]: E1208 21:44:36.685188 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36cb82aa-4e45-4bb1-9192-de3caeb2b9a5" containerName="glance-log" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.685226 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="36cb82aa-4e45-4bb1-9192-de3caeb2b9a5" containerName="glance-log" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.685563 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="36cb82aa-4e45-4bb1-9192-de3caeb2b9a5" containerName="glance-log" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.685581 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="36cb82aa-4e45-4bb1-9192-de3caeb2b9a5" containerName="glance-httpd" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.688700 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.694003 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.694207 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.700081 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.745904 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.818403 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9c41bbf-581f-4055-8855-6775f65b2409-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f9c41bbf-581f-4055-8855-6775f65b2409\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.818507 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9c41bbf-581f-4055-8855-6775f65b2409-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f9c41bbf-581f-4055-8855-6775f65b2409\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.818569 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f9c41bbf-581f-4055-8855-6775f65b2409-logs\") pod \"glance-default-internal-api-0\" (UID: \"f9c41bbf-581f-4055-8855-6775f65b2409\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.818595 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9c41bbf-581f-4055-8855-6775f65b2409-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f9c41bbf-581f-4055-8855-6775f65b2409\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.818624 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f9c41bbf-581f-4055-8855-6775f65b2409-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f9c41bbf-581f-4055-8855-6775f65b2409\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.818649 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2g2cf\" (UniqueName: \"kubernetes.io/projected/f9c41bbf-581f-4055-8855-6775f65b2409-kube-api-access-2g2cf\") pod \"glance-default-internal-api-0\" (UID: \"f9c41bbf-581f-4055-8855-6775f65b2409\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.818682 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9c41bbf-581f-4055-8855-6775f65b2409-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f9c41bbf-581f-4055-8855-6775f65b2409\") " 
pod="openstack/glance-default-internal-api-0" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.818938 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-3530eacc-1908-4492-a59f-15d59644c0dc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3530eacc-1908-4492-a59f-15d59644c0dc\") pod \"glance-default-internal-api-0\" (UID: \"f9c41bbf-581f-4055-8855-6775f65b2409\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.921822 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f9c41bbf-581f-4055-8855-6775f65b2409-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f9c41bbf-581f-4055-8855-6775f65b2409\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.922277 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f9c41bbf-581f-4055-8855-6775f65b2409-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f9c41bbf-581f-4055-8855-6775f65b2409\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.922346 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2g2cf\" (UniqueName: \"kubernetes.io/projected/f9c41bbf-581f-4055-8855-6775f65b2409-kube-api-access-2g2cf\") pod \"glance-default-internal-api-0\" (UID: \"f9c41bbf-581f-4055-8855-6775f65b2409\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.922489 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9c41bbf-581f-4055-8855-6775f65b2409-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f9c41bbf-581f-4055-8855-6775f65b2409\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.923147 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-3530eacc-1908-4492-a59f-15d59644c0dc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3530eacc-1908-4492-a59f-15d59644c0dc\") pod \"glance-default-internal-api-0\" (UID: \"f9c41bbf-581f-4055-8855-6775f65b2409\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.923364 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9c41bbf-581f-4055-8855-6775f65b2409-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f9c41bbf-581f-4055-8855-6775f65b2409\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.923489 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9c41bbf-581f-4055-8855-6775f65b2409-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f9c41bbf-581f-4055-8855-6775f65b2409\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.923620 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f9c41bbf-581f-4055-8855-6775f65b2409-logs\") pod \"glance-default-internal-api-0\" (UID: 
\"f9c41bbf-581f-4055-8855-6775f65b2409\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.923666 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9c41bbf-581f-4055-8855-6775f65b2409-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f9c41bbf-581f-4055-8855-6775f65b2409\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.924401 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f9c41bbf-581f-4055-8855-6775f65b2409-logs\") pod \"glance-default-internal-api-0\" (UID: \"f9c41bbf-581f-4055-8855-6775f65b2409\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.928315 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9c41bbf-581f-4055-8855-6775f65b2409-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f9c41bbf-581f-4055-8855-6775f65b2409\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.928382 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9c41bbf-581f-4055-8855-6775f65b2409-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f9c41bbf-581f-4055-8855-6775f65b2409\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.929616 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9c41bbf-581f-4055-8855-6775f65b2409-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f9c41bbf-581f-4055-8855-6775f65b2409\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.930538 4791 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.930570 4791 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-3530eacc-1908-4492-a59f-15d59644c0dc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3530eacc-1908-4492-a59f-15d59644c0dc\") pod \"glance-default-internal-api-0\" (UID: \"f9c41bbf-581f-4055-8855-6775f65b2409\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/b6e75dc9bbe8f5543873ba1da9c9ef2677b6e25aaa3d050091b399bf8011b4a0/globalmount\"" pod="openstack/glance-default-internal-api-0" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.933959 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9c41bbf-581f-4055-8855-6775f65b2409-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f9c41bbf-581f-4055-8855-6775f65b2409\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:44:36 crc kubenswrapper[4791]: I1208 21:44:36.938804 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2g2cf\" (UniqueName: \"kubernetes.io/projected/f9c41bbf-581f-4055-8855-6775f65b2409-kube-api-access-2g2cf\") pod \"glance-default-internal-api-0\" (UID: \"f9c41bbf-581f-4055-8855-6775f65b2409\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:44:37 crc kubenswrapper[4791]: I1208 21:44:37.023112 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-3530eacc-1908-4492-a59f-15d59644c0dc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3530eacc-1908-4492-a59f-15d59644c0dc\") pod \"glance-default-internal-api-0\" (UID: \"f9c41bbf-581f-4055-8855-6775f65b2409\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:44:37 crc kubenswrapper[4791]: I1208 21:44:37.230504 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"2afefbde-0eb6-4887-94b7-c018e79f1ddb","Type":"ContainerStarted","Data":"3543c3ce9f88008b03bc0928f13c688a6046d297580dc8fa67e5738ba828db46"} Dec 08 21:44:37 crc kubenswrapper[4791]: I1208 21:44:37.312682 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 08 21:44:37 crc kubenswrapper[4791]: I1208 21:44:37.622995 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="36cb82aa-4e45-4bb1-9192-de3caeb2b9a5" path="/var/lib/kubelet/pods/36cb82aa-4e45-4bb1-9192-de3caeb2b9a5/volumes" Dec 08 21:44:38 crc kubenswrapper[4791]: I1208 21:44:38.020297 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 08 21:44:38 crc kubenswrapper[4791]: I1208 21:44:38.273065 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"2afefbde-0eb6-4887-94b7-c018e79f1ddb","Type":"ContainerStarted","Data":"48c717aadac91e99e2a8e5ca47538b69bff54a80bae0f32eab406bedb59bb234"} Dec 08 21:44:38 crc kubenswrapper[4791]: I1208 21:44:38.279176 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f9c41bbf-581f-4055-8855-6775f65b2409","Type":"ContainerStarted","Data":"c58786d3df2cd46e829ba2aa7e65486167f149eacd897f80555295284626cfad"} Dec 08 21:44:39 crc kubenswrapper[4791]: I1208 21:44:39.124733 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-engine-6fdc69876f-c67rs" Dec 08 21:44:39 crc kubenswrapper[4791]: I1208 21:44:39.204789 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-66wmc\" (UniqueName: \"kubernetes.io/projected/30ff68d8-19dd-41a7-b38d-9f27571b27bc-kube-api-access-66wmc\") pod \"30ff68d8-19dd-41a7-b38d-9f27571b27bc\" (UID: \"30ff68d8-19dd-41a7-b38d-9f27571b27bc\") " Dec 08 21:44:39 crc kubenswrapper[4791]: I1208 21:44:39.204970 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30ff68d8-19dd-41a7-b38d-9f27571b27bc-combined-ca-bundle\") pod \"30ff68d8-19dd-41a7-b38d-9f27571b27bc\" (UID: \"30ff68d8-19dd-41a7-b38d-9f27571b27bc\") " Dec 08 21:44:39 crc kubenswrapper[4791]: I1208 21:44:39.204993 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30ff68d8-19dd-41a7-b38d-9f27571b27bc-config-data\") pod \"30ff68d8-19dd-41a7-b38d-9f27571b27bc\" (UID: \"30ff68d8-19dd-41a7-b38d-9f27571b27bc\") " Dec 08 21:44:39 crc kubenswrapper[4791]: I1208 21:44:39.205076 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/30ff68d8-19dd-41a7-b38d-9f27571b27bc-config-data-custom\") pod \"30ff68d8-19dd-41a7-b38d-9f27571b27bc\" (UID: \"30ff68d8-19dd-41a7-b38d-9f27571b27bc\") " Dec 08 21:44:39 crc kubenswrapper[4791]: I1208 21:44:39.245158 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30ff68d8-19dd-41a7-b38d-9f27571b27bc-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "30ff68d8-19dd-41a7-b38d-9f27571b27bc" (UID: "30ff68d8-19dd-41a7-b38d-9f27571b27bc"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:44:39 crc kubenswrapper[4791]: I1208 21:44:39.245638 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30ff68d8-19dd-41a7-b38d-9f27571b27bc-kube-api-access-66wmc" (OuterVolumeSpecName: "kube-api-access-66wmc") pod "30ff68d8-19dd-41a7-b38d-9f27571b27bc" (UID: "30ff68d8-19dd-41a7-b38d-9f27571b27bc"). InnerVolumeSpecName "kube-api-access-66wmc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:44:39 crc kubenswrapper[4791]: I1208 21:44:39.307366 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-66wmc\" (UniqueName: \"kubernetes.io/projected/30ff68d8-19dd-41a7-b38d-9f27571b27bc-kube-api-access-66wmc\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:39 crc kubenswrapper[4791]: I1208 21:44:39.307673 4791 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/30ff68d8-19dd-41a7-b38d-9f27571b27bc-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:39 crc kubenswrapper[4791]: I1208 21:44:39.308814 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30ff68d8-19dd-41a7-b38d-9f27571b27bc-config-data" (OuterVolumeSpecName: "config-data") pod "30ff68d8-19dd-41a7-b38d-9f27571b27bc" (UID: "30ff68d8-19dd-41a7-b38d-9f27571b27bc"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:44:39 crc kubenswrapper[4791]: I1208 21:44:39.332051 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30ff68d8-19dd-41a7-b38d-9f27571b27bc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "30ff68d8-19dd-41a7-b38d-9f27571b27bc" (UID: "30ff68d8-19dd-41a7-b38d-9f27571b27bc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:44:39 crc kubenswrapper[4791]: I1208 21:44:39.357938 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f9c41bbf-581f-4055-8855-6775f65b2409","Type":"ContainerStarted","Data":"d702cb869a0b4ca505042cb2e4e2d6ce149cbed0c7043fc9c04fe882eb3c0fb9"} Dec 08 21:44:39 crc kubenswrapper[4791]: I1208 21:44:39.390881 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"2afefbde-0eb6-4887-94b7-c018e79f1ddb","Type":"ContainerStarted","Data":"c1060af34d134b5230c9d44b2176f537077704e24d3020b4e60c14c7000586b8"} Dec 08 21:44:39 crc kubenswrapper[4791]: I1208 21:44:39.410308 4791 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30ff68d8-19dd-41a7-b38d-9f27571b27bc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:39 crc kubenswrapper[4791]: I1208 21:44:39.410338 4791 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30ff68d8-19dd-41a7-b38d-9f27571b27bc-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:39 crc kubenswrapper[4791]: I1208 21:44:39.427126 4791 generic.go:334] "Generic (PLEG): container finished" podID="30ff68d8-19dd-41a7-b38d-9f27571b27bc" containerID="45be5921a4a0e581b14dc5be8e3c3c955824db4bc5aff6990bd2e34e3810e37d" exitCode=0 Dec 08 21:44:39 crc kubenswrapper[4791]: I1208 21:44:39.427191 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-6fdc69876f-c67rs" event={"ID":"30ff68d8-19dd-41a7-b38d-9f27571b27bc","Type":"ContainerDied","Data":"45be5921a4a0e581b14dc5be8e3c3c955824db4bc5aff6990bd2e34e3810e37d"} Dec 08 21:44:39 crc kubenswrapper[4791]: I1208 21:44:39.427230 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-6fdc69876f-c67rs" event={"ID":"30ff68d8-19dd-41a7-b38d-9f27571b27bc","Type":"ContainerDied","Data":"8c64135f1d19785b5f063675c2ed46d32a8f287ecb27a4cd67c414be2cd57724"} Dec 08 21:44:39 crc kubenswrapper[4791]: I1208 21:44:39.427256 4791 scope.go:117] "RemoveContainer" containerID="45be5921a4a0e581b14dc5be8e3c3c955824db4bc5aff6990bd2e34e3810e37d" Dec 08 21:44:39 crc kubenswrapper[4791]: I1208 21:44:39.427505 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-engine-6fdc69876f-c67rs" Dec 08 21:44:39 crc kubenswrapper[4791]: I1208 21:44:39.447440 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.447417603 podStartE2EDuration="4.447417603s" podCreationTimestamp="2025-12-08 21:44:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:44:39.424954971 +0000 UTC m=+1556.123713326" watchObservedRunningTime="2025-12-08 21:44:39.447417603 +0000 UTC m=+1556.146175948" Dec 08 21:44:39 crc kubenswrapper[4791]: I1208 21:44:39.500578 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-engine-6fdc69876f-c67rs"] Dec 08 21:44:39 crc kubenswrapper[4791]: I1208 21:44:39.503798 4791 scope.go:117] "RemoveContainer" containerID="45be5921a4a0e581b14dc5be8e3c3c955824db4bc5aff6990bd2e34e3810e37d" Dec 08 21:44:39 crc kubenswrapper[4791]: E1208 21:44:39.506474 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"45be5921a4a0e581b14dc5be8e3c3c955824db4bc5aff6990bd2e34e3810e37d\": container with ID starting with 45be5921a4a0e581b14dc5be8e3c3c955824db4bc5aff6990bd2e34e3810e37d not found: ID does not exist" containerID="45be5921a4a0e581b14dc5be8e3c3c955824db4bc5aff6990bd2e34e3810e37d" Dec 08 21:44:39 crc kubenswrapper[4791]: I1208 21:44:39.506519 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"45be5921a4a0e581b14dc5be8e3c3c955824db4bc5aff6990bd2e34e3810e37d"} err="failed to get container status \"45be5921a4a0e581b14dc5be8e3c3c955824db4bc5aff6990bd2e34e3810e37d\": rpc error: code = NotFound desc = could not find container \"45be5921a4a0e581b14dc5be8e3c3c955824db4bc5aff6990bd2e34e3810e37d\": container with ID starting with 45be5921a4a0e581b14dc5be8e3c3c955824db4bc5aff6990bd2e34e3810e37d not found: ID does not exist" Dec 08 21:44:39 crc kubenswrapper[4791]: I1208 21:44:39.541279 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-engine-6fdc69876f-c67rs"] Dec 08 21:44:39 crc kubenswrapper[4791]: I1208 21:44:39.615898 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30ff68d8-19dd-41a7-b38d-9f27571b27bc" path="/var/lib/kubelet/pods/30ff68d8-19dd-41a7-b38d-9f27571b27bc/volumes" Dec 08 21:44:40 crc kubenswrapper[4791]: I1208 21:44:40.441530 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f9c41bbf-581f-4055-8855-6775f65b2409","Type":"ContainerStarted","Data":"e0f465a71f12ec59fff01d2c5d3c2ad305e481bd84017427b7b3971ad5c8e6d0"} Dec 08 21:44:40 crc kubenswrapper[4791]: I1208 21:44:40.473596 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.473572122 podStartE2EDuration="4.473572122s" podCreationTimestamp="2025-12-08 21:44:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:44:40.458567133 +0000 UTC m=+1557.157325478" watchObservedRunningTime="2025-12-08 21:44:40.473572122 +0000 UTC m=+1557.172330477" Dec 08 21:44:41 crc kubenswrapper[4791]: I1208 21:44:41.457701 4791 generic.go:334] "Generic (PLEG): container finished" podID="e02fffe8-6208-48f0-ba89-6d54f07f5ae4" 
containerID="55fad5012a7e2bb2fe91c5086e8fd40d5b5fddcc9b496c17fdb3a2c863152c60" exitCode=0 Dec 08 21:44:41 crc kubenswrapper[4791]: I1208 21:44:41.459080 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-bm9lh" event={"ID":"e02fffe8-6208-48f0-ba89-6d54f07f5ae4","Type":"ContainerDied","Data":"55fad5012a7e2bb2fe91c5086e8fd40d5b5fddcc9b496c17fdb3a2c863152c60"} Dec 08 21:44:42 crc kubenswrapper[4791]: I1208 21:44:42.925350 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-bm9lh" Dec 08 21:44:43 crc kubenswrapper[4791]: I1208 21:44:43.094361 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4fzjz\" (UniqueName: \"kubernetes.io/projected/e02fffe8-6208-48f0-ba89-6d54f07f5ae4-kube-api-access-4fzjz\") pod \"e02fffe8-6208-48f0-ba89-6d54f07f5ae4\" (UID: \"e02fffe8-6208-48f0-ba89-6d54f07f5ae4\") " Dec 08 21:44:43 crc kubenswrapper[4791]: I1208 21:44:43.094482 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e02fffe8-6208-48f0-ba89-6d54f07f5ae4-scripts\") pod \"e02fffe8-6208-48f0-ba89-6d54f07f5ae4\" (UID: \"e02fffe8-6208-48f0-ba89-6d54f07f5ae4\") " Dec 08 21:44:43 crc kubenswrapper[4791]: I1208 21:44:43.094561 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e02fffe8-6208-48f0-ba89-6d54f07f5ae4-config-data\") pod \"e02fffe8-6208-48f0-ba89-6d54f07f5ae4\" (UID: \"e02fffe8-6208-48f0-ba89-6d54f07f5ae4\") " Dec 08 21:44:43 crc kubenswrapper[4791]: I1208 21:44:43.094766 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e02fffe8-6208-48f0-ba89-6d54f07f5ae4-combined-ca-bundle\") pod \"e02fffe8-6208-48f0-ba89-6d54f07f5ae4\" (UID: \"e02fffe8-6208-48f0-ba89-6d54f07f5ae4\") " Dec 08 21:44:43 crc kubenswrapper[4791]: I1208 21:44:43.102889 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e02fffe8-6208-48f0-ba89-6d54f07f5ae4-scripts" (OuterVolumeSpecName: "scripts") pod "e02fffe8-6208-48f0-ba89-6d54f07f5ae4" (UID: "e02fffe8-6208-48f0-ba89-6d54f07f5ae4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:44:43 crc kubenswrapper[4791]: I1208 21:44:43.109874 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e02fffe8-6208-48f0-ba89-6d54f07f5ae4-kube-api-access-4fzjz" (OuterVolumeSpecName: "kube-api-access-4fzjz") pod "e02fffe8-6208-48f0-ba89-6d54f07f5ae4" (UID: "e02fffe8-6208-48f0-ba89-6d54f07f5ae4"). InnerVolumeSpecName "kube-api-access-4fzjz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:44:43 crc kubenswrapper[4791]: I1208 21:44:43.130365 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e02fffe8-6208-48f0-ba89-6d54f07f5ae4-config-data" (OuterVolumeSpecName: "config-data") pod "e02fffe8-6208-48f0-ba89-6d54f07f5ae4" (UID: "e02fffe8-6208-48f0-ba89-6d54f07f5ae4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:44:43 crc kubenswrapper[4791]: I1208 21:44:43.133226 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e02fffe8-6208-48f0-ba89-6d54f07f5ae4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e02fffe8-6208-48f0-ba89-6d54f07f5ae4" (UID: "e02fffe8-6208-48f0-ba89-6d54f07f5ae4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:44:43 crc kubenswrapper[4791]: I1208 21:44:43.198165 4791 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e02fffe8-6208-48f0-ba89-6d54f07f5ae4-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:43 crc kubenswrapper[4791]: I1208 21:44:43.198213 4791 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e02fffe8-6208-48f0-ba89-6d54f07f5ae4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:43 crc kubenswrapper[4791]: I1208 21:44:43.198228 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4fzjz\" (UniqueName: \"kubernetes.io/projected/e02fffe8-6208-48f0-ba89-6d54f07f5ae4-kube-api-access-4fzjz\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:43 crc kubenswrapper[4791]: I1208 21:44:43.198242 4791 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e02fffe8-6208-48f0-ba89-6d54f07f5ae4-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:43 crc kubenswrapper[4791]: I1208 21:44:43.483252 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-bm9lh" event={"ID":"e02fffe8-6208-48f0-ba89-6d54f07f5ae4","Type":"ContainerDied","Data":"e3574a870767712005dae8df85840379afd67a3f752a5905d1b67282897c62bf"} Dec 08 21:44:43 crc kubenswrapper[4791]: I1208 21:44:43.483303 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e3574a870767712005dae8df85840379afd67a3f752a5905d1b67282897c62bf" Dec 08 21:44:43 crc kubenswrapper[4791]: I1208 21:44:43.483323 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-bm9lh" Dec 08 21:44:43 crc kubenswrapper[4791]: I1208 21:44:43.589317 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 08 21:44:43 crc kubenswrapper[4791]: E1208 21:44:43.589905 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30ff68d8-19dd-41a7-b38d-9f27571b27bc" containerName="heat-engine" Dec 08 21:44:43 crc kubenswrapper[4791]: I1208 21:44:43.589922 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="30ff68d8-19dd-41a7-b38d-9f27571b27bc" containerName="heat-engine" Dec 08 21:44:43 crc kubenswrapper[4791]: E1208 21:44:43.589945 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e02fffe8-6208-48f0-ba89-6d54f07f5ae4" containerName="nova-cell0-conductor-db-sync" Dec 08 21:44:43 crc kubenswrapper[4791]: I1208 21:44:43.589958 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="e02fffe8-6208-48f0-ba89-6d54f07f5ae4" containerName="nova-cell0-conductor-db-sync" Dec 08 21:44:43 crc kubenswrapper[4791]: I1208 21:44:43.590181 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="e02fffe8-6208-48f0-ba89-6d54f07f5ae4" containerName="nova-cell0-conductor-db-sync" Dec 08 21:44:43 crc kubenswrapper[4791]: I1208 21:44:43.590201 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="30ff68d8-19dd-41a7-b38d-9f27571b27bc" containerName="heat-engine" Dec 08 21:44:43 crc kubenswrapper[4791]: I1208 21:44:43.591055 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 08 21:44:43 crc kubenswrapper[4791]: I1208 21:44:43.593282 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-55twv" Dec 08 21:44:43 crc kubenswrapper[4791]: I1208 21:44:43.593457 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 08 21:44:43 crc kubenswrapper[4791]: I1208 21:44:43.640492 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d854578-6157-4d97-879e-81bcb802f28d-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"1d854578-6157-4d97-879e-81bcb802f28d\") " pod="openstack/nova-cell0-conductor-0" Dec 08 21:44:43 crc kubenswrapper[4791]: I1208 21:44:43.640529 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d854578-6157-4d97-879e-81bcb802f28d-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"1d854578-6157-4d97-879e-81bcb802f28d\") " pod="openstack/nova-cell0-conductor-0" Dec 08 21:44:43 crc kubenswrapper[4791]: I1208 21:44:43.640554 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-27jss\" (UniqueName: \"kubernetes.io/projected/1d854578-6157-4d97-879e-81bcb802f28d-kube-api-access-27jss\") pod \"nova-cell0-conductor-0\" (UID: \"1d854578-6157-4d97-879e-81bcb802f28d\") " pod="openstack/nova-cell0-conductor-0" Dec 08 21:44:43 crc kubenswrapper[4791]: I1208 21:44:43.710930 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 08 21:44:43 crc kubenswrapper[4791]: I1208 21:44:43.743678 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/1d854578-6157-4d97-879e-81bcb802f28d-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"1d854578-6157-4d97-879e-81bcb802f28d\") " pod="openstack/nova-cell0-conductor-0" Dec 08 21:44:43 crc kubenswrapper[4791]: I1208 21:44:43.743895 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d854578-6157-4d97-879e-81bcb802f28d-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"1d854578-6157-4d97-879e-81bcb802f28d\") " pod="openstack/nova-cell0-conductor-0" Dec 08 21:44:43 crc kubenswrapper[4791]: I1208 21:44:43.743931 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-27jss\" (UniqueName: \"kubernetes.io/projected/1d854578-6157-4d97-879e-81bcb802f28d-kube-api-access-27jss\") pod \"nova-cell0-conductor-0\" (UID: \"1d854578-6157-4d97-879e-81bcb802f28d\") " pod="openstack/nova-cell0-conductor-0" Dec 08 21:44:43 crc kubenswrapper[4791]: I1208 21:44:43.750658 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d854578-6157-4d97-879e-81bcb802f28d-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"1d854578-6157-4d97-879e-81bcb802f28d\") " pod="openstack/nova-cell0-conductor-0" Dec 08 21:44:43 crc kubenswrapper[4791]: I1208 21:44:43.760325 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-27jss\" (UniqueName: \"kubernetes.io/projected/1d854578-6157-4d97-879e-81bcb802f28d-kube-api-access-27jss\") pod \"nova-cell0-conductor-0\" (UID: \"1d854578-6157-4d97-879e-81bcb802f28d\") " pod="openstack/nova-cell0-conductor-0" Dec 08 21:44:43 crc kubenswrapper[4791]: I1208 21:44:43.765672 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d854578-6157-4d97-879e-81bcb802f28d-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"1d854578-6157-4d97-879e-81bcb802f28d\") " pod="openstack/nova-cell0-conductor-0" Dec 08 21:44:44 crc kubenswrapper[4791]: I1208 21:44:44.016576 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 08 21:44:44 crc kubenswrapper[4791]: W1208 21:44:44.622565 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1d854578_6157_4d97_879e_81bcb802f28d.slice/crio-4ef7380dcf30b58b7b5766d1714d94aba993f72a1168b129856a6f2c14f5f912 WatchSource:0}: Error finding container 4ef7380dcf30b58b7b5766d1714d94aba993f72a1168b129856a6f2c14f5f912: Status 404 returned error can't find the container with id 4ef7380dcf30b58b7b5766d1714d94aba993f72a1168b129856a6f2c14f5f912 Dec 08 21:44:44 crc kubenswrapper[4791]: I1208 21:44:44.626554 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 08 21:44:45 crc kubenswrapper[4791]: I1208 21:44:45.509180 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"1d854578-6157-4d97-879e-81bcb802f28d","Type":"ContainerStarted","Data":"2ec3b0cade35087d270ba235850d7b310a35ceefe4442b707b19b429ac035427"} Dec 08 21:44:45 crc kubenswrapper[4791]: I1208 21:44:45.509483 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"1d854578-6157-4d97-879e-81bcb802f28d","Type":"ContainerStarted","Data":"4ef7380dcf30b58b7b5766d1714d94aba993f72a1168b129856a6f2c14f5f912"} Dec 08 21:44:45 crc kubenswrapper[4791]: I1208 21:44:45.509617 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Dec 08 21:44:45 crc kubenswrapper[4791]: I1208 21:44:45.534944 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.534926595 podStartE2EDuration="2.534926595s" podCreationTimestamp="2025-12-08 21:44:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:44:45.527837141 +0000 UTC m=+1562.226595486" watchObservedRunningTime="2025-12-08 21:44:45.534926595 +0000 UTC m=+1562.233684940" Dec 08 21:44:45 crc kubenswrapper[4791]: I1208 21:44:45.884416 4791 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 21:44:45 crc kubenswrapper[4791]: I1208 21:44:45.886147 4791 scope.go:117] "RemoveContainer" containerID="6aaa3b083eed387233afd8e27c4bf5a9ae6d9cb075ffa437c67d681e065ad29e" Dec 08 21:44:45 crc kubenswrapper[4791]: I1208 21:44:45.912596 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 08 21:44:45 crc kubenswrapper[4791]: I1208 21:44:45.912658 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 08 21:44:45 crc kubenswrapper[4791]: I1208 21:44:45.951232 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 08 21:44:45 crc kubenswrapper[4791]: I1208 21:44:45.987982 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 08 21:44:46 crc kubenswrapper[4791]: I1208 21:44:46.527922 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" 
event={"ID":"bcd8d669-4a40-401d-af99-651b840fb48b","Type":"ContainerStarted","Data":"a55a5aa481da4ab0be91afeeb5b363a463c1bac033f05bd23e2c0231f4f44330"} Dec 08 21:44:46 crc kubenswrapper[4791]: I1208 21:44:46.528235 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 08 21:44:46 crc kubenswrapper[4791]: I1208 21:44:46.529592 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 08 21:44:46 crc kubenswrapper[4791]: I1208 21:44:46.529692 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 21:44:47 crc kubenswrapper[4791]: I1208 21:44:47.314289 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 08 21:44:47 crc kubenswrapper[4791]: I1208 21:44:47.314344 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 08 21:44:47 crc kubenswrapper[4791]: I1208 21:44:47.362481 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 08 21:44:47 crc kubenswrapper[4791]: I1208 21:44:47.362537 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 08 21:44:47 crc kubenswrapper[4791]: I1208 21:44:47.539969 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 08 21:44:47 crc kubenswrapper[4791]: I1208 21:44:47.540815 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 08 21:44:48 crc kubenswrapper[4791]: I1208 21:44:48.552094 4791 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 08 21:44:48 crc kubenswrapper[4791]: I1208 21:44:48.552580 4791 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 08 21:44:48 crc kubenswrapper[4791]: I1208 21:44:48.696675 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 08 21:44:48 crc kubenswrapper[4791]: I1208 21:44:48.698325 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 08 21:44:49 crc kubenswrapper[4791]: I1208 21:44:49.115523 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Dec 08 21:44:49 crc kubenswrapper[4791]: I1208 21:44:49.752655 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-6tl54"] Dec 08 21:44:49 crc kubenswrapper[4791]: I1208 21:44:49.754391 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-6tl54" Dec 08 21:44:49 crc kubenswrapper[4791]: I1208 21:44:49.757063 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Dec 08 21:44:49 crc kubenswrapper[4791]: I1208 21:44:49.757243 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Dec 08 21:44:49 crc kubenswrapper[4791]: I1208 21:44:49.775328 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-6tl54"] Dec 08 21:44:49 crc kubenswrapper[4791]: I1208 21:44:49.901448 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5bdb996-544f-412b-8279-09b9e0bc2510-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-6tl54\" (UID: \"f5bdb996-544f-412b-8279-09b9e0bc2510\") " pod="openstack/nova-cell0-cell-mapping-6tl54" Dec 08 21:44:49 crc kubenswrapper[4791]: I1208 21:44:49.902688 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f5bdb996-544f-412b-8279-09b9e0bc2510-scripts\") pod \"nova-cell0-cell-mapping-6tl54\" (UID: \"f5bdb996-544f-412b-8279-09b9e0bc2510\") " pod="openstack/nova-cell0-cell-mapping-6tl54" Dec 08 21:44:49 crc kubenswrapper[4791]: I1208 21:44:49.902845 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zqpr6\" (UniqueName: \"kubernetes.io/projected/f5bdb996-544f-412b-8279-09b9e0bc2510-kube-api-access-zqpr6\") pod \"nova-cell0-cell-mapping-6tl54\" (UID: \"f5bdb996-544f-412b-8279-09b9e0bc2510\") " pod="openstack/nova-cell0-cell-mapping-6tl54" Dec 08 21:44:49 crc kubenswrapper[4791]: I1208 21:44:49.903063 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5bdb996-544f-412b-8279-09b9e0bc2510-config-data\") pod \"nova-cell0-cell-mapping-6tl54\" (UID: \"f5bdb996-544f-412b-8279-09b9e0bc2510\") " pod="openstack/nova-cell0-cell-mapping-6tl54" Dec 08 21:44:49 crc kubenswrapper[4791]: I1208 21:44:49.947528 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 08 21:44:49 crc kubenswrapper[4791]: I1208 21:44:49.949923 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 08 21:44:49 crc kubenswrapper[4791]: I1208 21:44:49.960217 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.005228 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5bdb996-544f-412b-8279-09b9e0bc2510-config-data\") pod \"nova-cell0-cell-mapping-6tl54\" (UID: \"f5bdb996-544f-412b-8279-09b9e0bc2510\") " pod="openstack/nova-cell0-cell-mapping-6tl54" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.005398 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5bdb996-544f-412b-8279-09b9e0bc2510-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-6tl54\" (UID: \"f5bdb996-544f-412b-8279-09b9e0bc2510\") " pod="openstack/nova-cell0-cell-mapping-6tl54" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.005446 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f5bdb996-544f-412b-8279-09b9e0bc2510-scripts\") pod \"nova-cell0-cell-mapping-6tl54\" (UID: \"f5bdb996-544f-412b-8279-09b9e0bc2510\") " pod="openstack/nova-cell0-cell-mapping-6tl54" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.005482 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zqpr6\" (UniqueName: \"kubernetes.io/projected/f5bdb996-544f-412b-8279-09b9e0bc2510-kube-api-access-zqpr6\") pod \"nova-cell0-cell-mapping-6tl54\" (UID: \"f5bdb996-544f-412b-8279-09b9e0bc2510\") " pod="openstack/nova-cell0-cell-mapping-6tl54" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.017219 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.060785 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5bdb996-544f-412b-8279-09b9e0bc2510-config-data\") pod \"nova-cell0-cell-mapping-6tl54\" (UID: \"f5bdb996-544f-412b-8279-09b9e0bc2510\") " pod="openstack/nova-cell0-cell-mapping-6tl54" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.061773 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f5bdb996-544f-412b-8279-09b9e0bc2510-scripts\") pod \"nova-cell0-cell-mapping-6tl54\" (UID: \"f5bdb996-544f-412b-8279-09b9e0bc2510\") " pod="openstack/nova-cell0-cell-mapping-6tl54" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.062090 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5bdb996-544f-412b-8279-09b9e0bc2510-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-6tl54\" (UID: \"f5bdb996-544f-412b-8279-09b9e0bc2510\") " pod="openstack/nova-cell0-cell-mapping-6tl54" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.116844 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e204b013-c633-4311-a89a-0547f8cfc6e3-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e204b013-c633-4311-a89a-0547f8cfc6e3\") " pod="openstack/nova-api-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.116928 4791 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e204b013-c633-4311-a89a-0547f8cfc6e3-config-data\") pod \"nova-api-0\" (UID: \"e204b013-c633-4311-a89a-0547f8cfc6e3\") " pod="openstack/nova-api-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.116972 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e204b013-c633-4311-a89a-0547f8cfc6e3-logs\") pod \"nova-api-0\" (UID: \"e204b013-c633-4311-a89a-0547f8cfc6e3\") " pod="openstack/nova-api-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.116994 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qxkdh\" (UniqueName: \"kubernetes.io/projected/e204b013-c633-4311-a89a-0547f8cfc6e3-kube-api-access-qxkdh\") pod \"nova-api-0\" (UID: \"e204b013-c633-4311-a89a-0547f8cfc6e3\") " pod="openstack/nova-api-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.159902 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zqpr6\" (UniqueName: \"kubernetes.io/projected/f5bdb996-544f-412b-8279-09b9e0bc2510-kube-api-access-zqpr6\") pod \"nova-cell0-cell-mapping-6tl54\" (UID: \"f5bdb996-544f-412b-8279-09b9e0bc2510\") " pod="openstack/nova-cell0-cell-mapping-6tl54" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.236807 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e204b013-c633-4311-a89a-0547f8cfc6e3-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e204b013-c633-4311-a89a-0547f8cfc6e3\") " pod="openstack/nova-api-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.236886 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e204b013-c633-4311-a89a-0547f8cfc6e3-config-data\") pod \"nova-api-0\" (UID: \"e204b013-c633-4311-a89a-0547f8cfc6e3\") " pod="openstack/nova-api-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.236936 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e204b013-c633-4311-a89a-0547f8cfc6e3-logs\") pod \"nova-api-0\" (UID: \"e204b013-c633-4311-a89a-0547f8cfc6e3\") " pod="openstack/nova-api-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.236958 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qxkdh\" (UniqueName: \"kubernetes.io/projected/e204b013-c633-4311-a89a-0547f8cfc6e3-kube-api-access-qxkdh\") pod \"nova-api-0\" (UID: \"e204b013-c633-4311-a89a-0547f8cfc6e3\") " pod="openstack/nova-api-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.238056 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e204b013-c633-4311-a89a-0547f8cfc6e3-logs\") pod \"nova-api-0\" (UID: \"e204b013-c633-4311-a89a-0547f8cfc6e3\") " pod="openstack/nova-api-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.262762 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e204b013-c633-4311-a89a-0547f8cfc6e3-config-data\") pod \"nova-api-0\" (UID: \"e204b013-c633-4311-a89a-0547f8cfc6e3\") " pod="openstack/nova-api-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.271858 4791 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e204b013-c633-4311-a89a-0547f8cfc6e3-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e204b013-c633-4311-a89a-0547f8cfc6e3\") " pod="openstack/nova-api-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.282983 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.290104 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.293244 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qxkdh\" (UniqueName: \"kubernetes.io/projected/e204b013-c633-4311-a89a-0547f8cfc6e3-kube-api-access-qxkdh\") pod \"nova-api-0\" (UID: \"e204b013-c633-4311-a89a-0547f8cfc6e3\") " pod="openstack/nova-api-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.294347 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.316407 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.330357 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.371261 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.377307 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.380108 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.380889 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-6tl54" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.390452 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.411889 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.414240 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.435218 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.438669 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.460381 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74378ef2-518b-4974-bf29-33b29b6ab17d-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"74378ef2-518b-4974-bf29-33b29b6ab17d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.460469 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/033789b4-c9c3-4364-a6d5-db34740240b4-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"033789b4-c9c3-4364-a6d5-db34740240b4\") " pod="openstack/nova-scheduler-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.460530 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74378ef2-518b-4974-bf29-33b29b6ab17d-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"74378ef2-518b-4974-bf29-33b29b6ab17d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.460559 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rghxt\" (UniqueName: \"kubernetes.io/projected/74378ef2-518b-4974-bf29-33b29b6ab17d-kube-api-access-rghxt\") pod \"nova-cell1-novncproxy-0\" (UID: \"74378ef2-518b-4974-bf29-33b29b6ab17d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.460606 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/033789b4-c9c3-4364-a6d5-db34740240b4-config-data\") pod \"nova-scheduler-0\" (UID: \"033789b4-c9c3-4364-a6d5-db34740240b4\") " pod="openstack/nova-scheduler-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.460687 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xwm4b\" (UniqueName: \"kubernetes.io/projected/033789b4-c9c3-4364-a6d5-db34740240b4-kube-api-access-xwm4b\") pod \"nova-scheduler-0\" (UID: \"033789b4-c9c3-4364-a6d5-db34740240b4\") " pod="openstack/nova-scheduler-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.482874 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-568d7fd7cf-6nlb8"] Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.487686 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-568d7fd7cf-6nlb8" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.514273 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-568d7fd7cf-6nlb8"] Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.563117 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74378ef2-518b-4974-bf29-33b29b6ab17d-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"74378ef2-518b-4974-bf29-33b29b6ab17d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.563520 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rghxt\" (UniqueName: \"kubernetes.io/projected/74378ef2-518b-4974-bf29-33b29b6ab17d-kube-api-access-rghxt\") pod \"nova-cell1-novncproxy-0\" (UID: \"74378ef2-518b-4974-bf29-33b29b6ab17d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.563593 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf-config-data\") pod \"nova-metadata-0\" (UID: \"d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf\") " pod="openstack/nova-metadata-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.563645 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf-logs\") pod \"nova-metadata-0\" (UID: \"d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf\") " pod="openstack/nova-metadata-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.563673 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf\") " pod="openstack/nova-metadata-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.563756 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/033789b4-c9c3-4364-a6d5-db34740240b4-config-data\") pod \"nova-scheduler-0\" (UID: \"033789b4-c9c3-4364-a6d5-db34740240b4\") " pod="openstack/nova-scheduler-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.563786 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mkpcx\" (UniqueName: \"kubernetes.io/projected/d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf-kube-api-access-mkpcx\") pod \"nova-metadata-0\" (UID: \"d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf\") " pod="openstack/nova-metadata-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.563899 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2af01a9d-314c-4443-bd34-ba54d4d5a3fd-dns-svc\") pod \"dnsmasq-dns-568d7fd7cf-6nlb8\" (UID: \"2af01a9d-314c-4443-bd34-ba54d4d5a3fd\") " pod="openstack/dnsmasq-dns-568d7fd7cf-6nlb8" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.563937 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xwm4b\" (UniqueName: \"kubernetes.io/projected/033789b4-c9c3-4364-a6d5-db34740240b4-kube-api-access-xwm4b\") pod 
\"nova-scheduler-0\" (UID: \"033789b4-c9c3-4364-a6d5-db34740240b4\") " pod="openstack/nova-scheduler-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.564539 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2af01a9d-314c-4443-bd34-ba54d4d5a3fd-dns-swift-storage-0\") pod \"dnsmasq-dns-568d7fd7cf-6nlb8\" (UID: \"2af01a9d-314c-4443-bd34-ba54d4d5a3fd\") " pod="openstack/dnsmasq-dns-568d7fd7cf-6nlb8" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.564694 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7l4m8\" (UniqueName: \"kubernetes.io/projected/2af01a9d-314c-4443-bd34-ba54d4d5a3fd-kube-api-access-7l4m8\") pod \"dnsmasq-dns-568d7fd7cf-6nlb8\" (UID: \"2af01a9d-314c-4443-bd34-ba54d4d5a3fd\") " pod="openstack/dnsmasq-dns-568d7fd7cf-6nlb8" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.565685 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2af01a9d-314c-4443-bd34-ba54d4d5a3fd-ovsdbserver-sb\") pod \"dnsmasq-dns-568d7fd7cf-6nlb8\" (UID: \"2af01a9d-314c-4443-bd34-ba54d4d5a3fd\") " pod="openstack/dnsmasq-dns-568d7fd7cf-6nlb8" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.565763 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74378ef2-518b-4974-bf29-33b29b6ab17d-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"74378ef2-518b-4974-bf29-33b29b6ab17d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.565869 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2af01a9d-314c-4443-bd34-ba54d4d5a3fd-ovsdbserver-nb\") pod \"dnsmasq-dns-568d7fd7cf-6nlb8\" (UID: \"2af01a9d-314c-4443-bd34-ba54d4d5a3fd\") " pod="openstack/dnsmasq-dns-568d7fd7cf-6nlb8" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.565925 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/033789b4-c9c3-4364-a6d5-db34740240b4-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"033789b4-c9c3-4364-a6d5-db34740240b4\") " pod="openstack/nova-scheduler-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.565949 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2af01a9d-314c-4443-bd34-ba54d4d5a3fd-config\") pod \"dnsmasq-dns-568d7fd7cf-6nlb8\" (UID: \"2af01a9d-314c-4443-bd34-ba54d4d5a3fd\") " pod="openstack/dnsmasq-dns-568d7fd7cf-6nlb8" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.581663 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74378ef2-518b-4974-bf29-33b29b6ab17d-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"74378ef2-518b-4974-bf29-33b29b6ab17d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.587150 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/033789b4-c9c3-4364-a6d5-db34740240b4-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: 
\"033789b4-c9c3-4364-a6d5-db34740240b4\") " pod="openstack/nova-scheduler-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.587889 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/033789b4-c9c3-4364-a6d5-db34740240b4-config-data\") pod \"nova-scheduler-0\" (UID: \"033789b4-c9c3-4364-a6d5-db34740240b4\") " pod="openstack/nova-scheduler-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.588545 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xwm4b\" (UniqueName: \"kubernetes.io/projected/033789b4-c9c3-4364-a6d5-db34740240b4-kube-api-access-xwm4b\") pod \"nova-scheduler-0\" (UID: \"033789b4-c9c3-4364-a6d5-db34740240b4\") " pod="openstack/nova-scheduler-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.590490 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rghxt\" (UniqueName: \"kubernetes.io/projected/74378ef2-518b-4974-bf29-33b29b6ab17d-kube-api-access-rghxt\") pod \"nova-cell1-novncproxy-0\" (UID: \"74378ef2-518b-4974-bf29-33b29b6ab17d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.602102 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74378ef2-518b-4974-bf29-33b29b6ab17d-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"74378ef2-518b-4974-bf29-33b29b6ab17d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.672329 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2af01a9d-314c-4443-bd34-ba54d4d5a3fd-ovsdbserver-nb\") pod \"dnsmasq-dns-568d7fd7cf-6nlb8\" (UID: \"2af01a9d-314c-4443-bd34-ba54d4d5a3fd\") " pod="openstack/dnsmasq-dns-568d7fd7cf-6nlb8" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.672421 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2af01a9d-314c-4443-bd34-ba54d4d5a3fd-config\") pod \"dnsmasq-dns-568d7fd7cf-6nlb8\" (UID: \"2af01a9d-314c-4443-bd34-ba54d4d5a3fd\") " pod="openstack/dnsmasq-dns-568d7fd7cf-6nlb8" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.672560 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf-config-data\") pod \"nova-metadata-0\" (UID: \"d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf\") " pod="openstack/nova-metadata-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.672588 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf-logs\") pod \"nova-metadata-0\" (UID: \"d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf\") " pod="openstack/nova-metadata-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.672614 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf\") " pod="openstack/nova-metadata-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.672688 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mkpcx\" 
(UniqueName: \"kubernetes.io/projected/d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf-kube-api-access-mkpcx\") pod \"nova-metadata-0\" (UID: \"d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf\") " pod="openstack/nova-metadata-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.672825 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2af01a9d-314c-4443-bd34-ba54d4d5a3fd-dns-svc\") pod \"dnsmasq-dns-568d7fd7cf-6nlb8\" (UID: \"2af01a9d-314c-4443-bd34-ba54d4d5a3fd\") " pod="openstack/dnsmasq-dns-568d7fd7cf-6nlb8" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.672922 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2af01a9d-314c-4443-bd34-ba54d4d5a3fd-dns-swift-storage-0\") pod \"dnsmasq-dns-568d7fd7cf-6nlb8\" (UID: \"2af01a9d-314c-4443-bd34-ba54d4d5a3fd\") " pod="openstack/dnsmasq-dns-568d7fd7cf-6nlb8" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.672993 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7l4m8\" (UniqueName: \"kubernetes.io/projected/2af01a9d-314c-4443-bd34-ba54d4d5a3fd-kube-api-access-7l4m8\") pod \"dnsmasq-dns-568d7fd7cf-6nlb8\" (UID: \"2af01a9d-314c-4443-bd34-ba54d4d5a3fd\") " pod="openstack/dnsmasq-dns-568d7fd7cf-6nlb8" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.673040 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2af01a9d-314c-4443-bd34-ba54d4d5a3fd-ovsdbserver-sb\") pod \"dnsmasq-dns-568d7fd7cf-6nlb8\" (UID: \"2af01a9d-314c-4443-bd34-ba54d4d5a3fd\") " pod="openstack/dnsmasq-dns-568d7fd7cf-6nlb8" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.674352 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2af01a9d-314c-4443-bd34-ba54d4d5a3fd-ovsdbserver-sb\") pod \"dnsmasq-dns-568d7fd7cf-6nlb8\" (UID: \"2af01a9d-314c-4443-bd34-ba54d4d5a3fd\") " pod="openstack/dnsmasq-dns-568d7fd7cf-6nlb8" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.675697 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2af01a9d-314c-4443-bd34-ba54d4d5a3fd-ovsdbserver-nb\") pod \"dnsmasq-dns-568d7fd7cf-6nlb8\" (UID: \"2af01a9d-314c-4443-bd34-ba54d4d5a3fd\") " pod="openstack/dnsmasq-dns-568d7fd7cf-6nlb8" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.676481 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2af01a9d-314c-4443-bd34-ba54d4d5a3fd-dns-svc\") pod \"dnsmasq-dns-568d7fd7cf-6nlb8\" (UID: \"2af01a9d-314c-4443-bd34-ba54d4d5a3fd\") " pod="openstack/dnsmasq-dns-568d7fd7cf-6nlb8" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.678395 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf-logs\") pod \"nova-metadata-0\" (UID: \"d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf\") " pod="openstack/nova-metadata-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.680606 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2af01a9d-314c-4443-bd34-ba54d4d5a3fd-dns-swift-storage-0\") pod \"dnsmasq-dns-568d7fd7cf-6nlb8\" (UID: 
\"2af01a9d-314c-4443-bd34-ba54d4d5a3fd\") " pod="openstack/dnsmasq-dns-568d7fd7cf-6nlb8" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.683460 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2af01a9d-314c-4443-bd34-ba54d4d5a3fd-config\") pod \"dnsmasq-dns-568d7fd7cf-6nlb8\" (UID: \"2af01a9d-314c-4443-bd34-ba54d4d5a3fd\") " pod="openstack/dnsmasq-dns-568d7fd7cf-6nlb8" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.685402 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf\") " pod="openstack/nova-metadata-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.690496 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf-config-data\") pod \"nova-metadata-0\" (UID: \"d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf\") " pod="openstack/nova-metadata-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.703594 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mkpcx\" (UniqueName: \"kubernetes.io/projected/d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf-kube-api-access-mkpcx\") pod \"nova-metadata-0\" (UID: \"d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf\") " pod="openstack/nova-metadata-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.703599 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7l4m8\" (UniqueName: \"kubernetes.io/projected/2af01a9d-314c-4443-bd34-ba54d4d5a3fd-kube-api-access-7l4m8\") pod \"dnsmasq-dns-568d7fd7cf-6nlb8\" (UID: \"2af01a9d-314c-4443-bd34-ba54d4d5a3fd\") " pod="openstack/dnsmasq-dns-568d7fd7cf-6nlb8" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.709801 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.711179 4791 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.715359 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.760871 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.790114 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.846350 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-568d7fd7cf-6nlb8" Dec 08 21:44:50 crc kubenswrapper[4791]: I1208 21:44:50.912967 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 08 21:44:51 crc kubenswrapper[4791]: I1208 21:44:51.029309 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 08 21:44:51 crc kubenswrapper[4791]: I1208 21:44:51.405912 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-6tl54"] Dec 08 21:44:51 crc kubenswrapper[4791]: I1208 21:44:51.423375 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 08 21:44:51 crc kubenswrapper[4791]: I1208 21:44:51.685485 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e204b013-c633-4311-a89a-0547f8cfc6e3","Type":"ContainerStarted","Data":"efa1a5f5a57679c8eef39fea9807190ec855fdb7dfd53f1c6de0d2cb51261c60"} Dec 08 21:44:51 crc kubenswrapper[4791]: I1208 21:44:51.690341 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"033789b4-c9c3-4364-a6d5-db34740240b4","Type":"ContainerStarted","Data":"2b38181e6bae1f16a2c1899652ba6d085fb7a1664c8725dbf3ecebbdcd7b49d1"} Dec 08 21:44:51 crc kubenswrapper[4791]: I1208 21:44:51.692043 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-6tl54" event={"ID":"f5bdb996-544f-412b-8279-09b9e0bc2510","Type":"ContainerStarted","Data":"abbe4133c1c8e64aa520897760c98045592c7180122900f45fc6c9e85b23bb4f"} Dec 08 21:44:51 crc kubenswrapper[4791]: I1208 21:44:51.834408 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 08 21:44:52 crc kubenswrapper[4791]: I1208 21:44:52.053176 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 08 21:44:52 crc kubenswrapper[4791]: I1208 21:44:52.141429 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-568d7fd7cf-6nlb8"] Dec 08 21:44:52 crc kubenswrapper[4791]: I1208 21:44:52.179350 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-wkrbl"] Dec 08 21:44:52 crc kubenswrapper[4791]: I1208 21:44:52.181232 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-wkrbl" Dec 08 21:44:52 crc kubenswrapper[4791]: I1208 21:44:52.197789 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 08 21:44:52 crc kubenswrapper[4791]: I1208 21:44:52.198054 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Dec 08 21:44:52 crc kubenswrapper[4791]: I1208 21:44:52.199592 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-wkrbl"] Dec 08 21:44:52 crc kubenswrapper[4791]: I1208 21:44:52.234456 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4d1939f0-2dba-46a3-96ab-bb0fd01e0c40-scripts\") pod \"nova-cell1-conductor-db-sync-wkrbl\" (UID: \"4d1939f0-2dba-46a3-96ab-bb0fd01e0c40\") " pod="openstack/nova-cell1-conductor-db-sync-wkrbl" Dec 08 21:44:52 crc kubenswrapper[4791]: I1208 21:44:52.234569 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5584\" (UniqueName: \"kubernetes.io/projected/4d1939f0-2dba-46a3-96ab-bb0fd01e0c40-kube-api-access-r5584\") pod \"nova-cell1-conductor-db-sync-wkrbl\" (UID: \"4d1939f0-2dba-46a3-96ab-bb0fd01e0c40\") " pod="openstack/nova-cell1-conductor-db-sync-wkrbl" Dec 08 21:44:52 crc kubenswrapper[4791]: I1208 21:44:52.234684 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d1939f0-2dba-46a3-96ab-bb0fd01e0c40-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-wkrbl\" (UID: \"4d1939f0-2dba-46a3-96ab-bb0fd01e0c40\") " pod="openstack/nova-cell1-conductor-db-sync-wkrbl" Dec 08 21:44:52 crc kubenswrapper[4791]: I1208 21:44:52.234770 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d1939f0-2dba-46a3-96ab-bb0fd01e0c40-config-data\") pod \"nova-cell1-conductor-db-sync-wkrbl\" (UID: \"4d1939f0-2dba-46a3-96ab-bb0fd01e0c40\") " pod="openstack/nova-cell1-conductor-db-sync-wkrbl" Dec 08 21:44:52 crc kubenswrapper[4791]: I1208 21:44:52.339178 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d1939f0-2dba-46a3-96ab-bb0fd01e0c40-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-wkrbl\" (UID: \"4d1939f0-2dba-46a3-96ab-bb0fd01e0c40\") " pod="openstack/nova-cell1-conductor-db-sync-wkrbl" Dec 08 21:44:52 crc kubenswrapper[4791]: I1208 21:44:52.339282 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d1939f0-2dba-46a3-96ab-bb0fd01e0c40-config-data\") pod \"nova-cell1-conductor-db-sync-wkrbl\" (UID: \"4d1939f0-2dba-46a3-96ab-bb0fd01e0c40\") " pod="openstack/nova-cell1-conductor-db-sync-wkrbl" Dec 08 21:44:52 crc kubenswrapper[4791]: I1208 21:44:52.339443 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4d1939f0-2dba-46a3-96ab-bb0fd01e0c40-scripts\") pod \"nova-cell1-conductor-db-sync-wkrbl\" (UID: \"4d1939f0-2dba-46a3-96ab-bb0fd01e0c40\") " pod="openstack/nova-cell1-conductor-db-sync-wkrbl" Dec 08 21:44:52 crc kubenswrapper[4791]: I1208 21:44:52.339473 4791 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-r5584\" (UniqueName: \"kubernetes.io/projected/4d1939f0-2dba-46a3-96ab-bb0fd01e0c40-kube-api-access-r5584\") pod \"nova-cell1-conductor-db-sync-wkrbl\" (UID: \"4d1939f0-2dba-46a3-96ab-bb0fd01e0c40\") " pod="openstack/nova-cell1-conductor-db-sync-wkrbl" Dec 08 21:44:52 crc kubenswrapper[4791]: I1208 21:44:52.349641 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d1939f0-2dba-46a3-96ab-bb0fd01e0c40-config-data\") pod \"nova-cell1-conductor-db-sync-wkrbl\" (UID: \"4d1939f0-2dba-46a3-96ab-bb0fd01e0c40\") " pod="openstack/nova-cell1-conductor-db-sync-wkrbl" Dec 08 21:44:52 crc kubenswrapper[4791]: I1208 21:44:52.349784 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d1939f0-2dba-46a3-96ab-bb0fd01e0c40-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-wkrbl\" (UID: \"4d1939f0-2dba-46a3-96ab-bb0fd01e0c40\") " pod="openstack/nova-cell1-conductor-db-sync-wkrbl" Dec 08 21:44:52 crc kubenswrapper[4791]: I1208 21:44:52.351180 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4d1939f0-2dba-46a3-96ab-bb0fd01e0c40-scripts\") pod \"nova-cell1-conductor-db-sync-wkrbl\" (UID: \"4d1939f0-2dba-46a3-96ab-bb0fd01e0c40\") " pod="openstack/nova-cell1-conductor-db-sync-wkrbl" Dec 08 21:44:52 crc kubenswrapper[4791]: I1208 21:44:52.365444 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5584\" (UniqueName: \"kubernetes.io/projected/4d1939f0-2dba-46a3-96ab-bb0fd01e0c40-kube-api-access-r5584\") pod \"nova-cell1-conductor-db-sync-wkrbl\" (UID: \"4d1939f0-2dba-46a3-96ab-bb0fd01e0c40\") " pod="openstack/nova-cell1-conductor-db-sync-wkrbl" Dec 08 21:44:52 crc kubenswrapper[4791]: I1208 21:44:52.668386 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-wkrbl" Dec 08 21:44:52 crc kubenswrapper[4791]: I1208 21:44:52.724007 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf","Type":"ContainerStarted","Data":"fddb222e4123d7c3bb3dffad40b9fa187aabd3cc151a6c623a1dbecd21137bb1"} Dec 08 21:44:52 crc kubenswrapper[4791]: I1208 21:44:52.726727 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-6tl54" event={"ID":"f5bdb996-544f-412b-8279-09b9e0bc2510","Type":"ContainerStarted","Data":"8e3dbe569a4ad5f2c161b5771769cc3381bf0ca2f72a94a5aee50f86d778ade9"} Dec 08 21:44:52 crc kubenswrapper[4791]: I1208 21:44:52.742273 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"74378ef2-518b-4974-bf29-33b29b6ab17d","Type":"ContainerStarted","Data":"a4ae6e8dee4e0e378d94c80d039b24e7dd8b6d0055b1a1016d3faaacc83ebed0"} Dec 08 21:44:52 crc kubenswrapper[4791]: I1208 21:44:52.748764 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-6tl54" podStartSLOduration=3.748744399 podStartE2EDuration="3.748744399s" podCreationTimestamp="2025-12-08 21:44:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:44:52.746875713 +0000 UTC m=+1569.445634058" watchObservedRunningTime="2025-12-08 21:44:52.748744399 +0000 UTC m=+1569.447502744" Dec 08 21:44:52 crc kubenswrapper[4791]: I1208 21:44:52.751384 4791 generic.go:334] "Generic (PLEG): container finished" podID="2af01a9d-314c-4443-bd34-ba54d4d5a3fd" containerID="e1989dbd887675fa9a730372a0e3a94f045cd664ce7d4b06bf600bee73bd3e60" exitCode=0 Dec 08 21:44:52 crc kubenswrapper[4791]: I1208 21:44:52.751442 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-568d7fd7cf-6nlb8" event={"ID":"2af01a9d-314c-4443-bd34-ba54d4d5a3fd","Type":"ContainerDied","Data":"e1989dbd887675fa9a730372a0e3a94f045cd664ce7d4b06bf600bee73bd3e60"} Dec 08 21:44:52 crc kubenswrapper[4791]: I1208 21:44:52.751478 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-568d7fd7cf-6nlb8" event={"ID":"2af01a9d-314c-4443-bd34-ba54d4d5a3fd","Type":"ContainerStarted","Data":"82dd26d2085332d1bac34713fa42fdc9c9771e4b873dcd9e7bc5dcf74989cbe6"} Dec 08 21:44:53 crc kubenswrapper[4791]: I1208 21:44:53.352926 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-wkrbl"] Dec 08 21:44:53 crc kubenswrapper[4791]: I1208 21:44:53.772091 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-568d7fd7cf-6nlb8" event={"ID":"2af01a9d-314c-4443-bd34-ba54d4d5a3fd","Type":"ContainerStarted","Data":"48914afeba64de2d059d0887ff0a37e0596c8341a2eb0e98f3a2b07176164fd1"} Dec 08 21:44:53 crc kubenswrapper[4791]: I1208 21:44:53.772452 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-568d7fd7cf-6nlb8" Dec 08 21:44:53 crc kubenswrapper[4791]: I1208 21:44:53.806491 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-568d7fd7cf-6nlb8" podStartSLOduration=3.806465954 podStartE2EDuration="3.806465954s" podCreationTimestamp="2025-12-08 21:44:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-12-08 21:44:53.797235977 +0000 UTC m=+1570.495994332" watchObservedRunningTime="2025-12-08 21:44:53.806465954 +0000 UTC m=+1570.505224299" Dec 08 21:44:54 crc kubenswrapper[4791]: I1208 21:44:54.361575 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 08 21:44:54 crc kubenswrapper[4791]: I1208 21:44:54.456917 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 08 21:44:55 crc kubenswrapper[4791]: I1208 21:44:55.807836 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-wkrbl" event={"ID":"4d1939f0-2dba-46a3-96ab-bb0fd01e0c40","Type":"ContainerStarted","Data":"21e8766d53a1c43bf50880220fb5dfc78dca4ce893603f82eddfb9372399e41a"} Dec 08 21:44:55 crc kubenswrapper[4791]: I1208 21:44:55.886693 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 21:44:56 crc kubenswrapper[4791]: I1208 21:44:56.827143 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"74378ef2-518b-4974-bf29-33b29b6ab17d","Type":"ContainerStarted","Data":"c7f46c3042c28659e655069923bec570d104eae28dad8ae40063e40b753e8000"} Dec 08 21:44:56 crc kubenswrapper[4791]: I1208 21:44:56.828896 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="74378ef2-518b-4974-bf29-33b29b6ab17d" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://c7f46c3042c28659e655069923bec570d104eae28dad8ae40063e40b753e8000" gracePeriod=30 Dec 08 21:44:56 crc kubenswrapper[4791]: I1208 21:44:56.832884 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-wkrbl" event={"ID":"4d1939f0-2dba-46a3-96ab-bb0fd01e0c40","Type":"ContainerStarted","Data":"34b6a0637268b786e00dbaf8e3334ebb53ccf8ed29eca966d9e836eafc366ba3"} Dec 08 21:44:56 crc kubenswrapper[4791]: I1208 21:44:56.841967 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf","Type":"ContainerStarted","Data":"1fe9c673f3c319638927d4376edecab9322cd2a0bde72f6d13df07b4fa78812c"} Dec 08 21:44:56 crc kubenswrapper[4791]: I1208 21:44:56.843428 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e204b013-c633-4311-a89a-0547f8cfc6e3","Type":"ContainerStarted","Data":"0ea6761a76e32dcc035e62e1153dd1391d0bd691a64b982d01b6f0052ff68e97"} Dec 08 21:44:56 crc kubenswrapper[4791]: I1208 21:44:56.844655 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"033789b4-c9c3-4364-a6d5-db34740240b4","Type":"ContainerStarted","Data":"33db1fa78d0dc34574fff4243596bfb876f1367c4ea93215bf5e862897c98132"} Dec 08 21:44:56 crc kubenswrapper[4791]: I1208 21:44:56.855110 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.449588574 podStartE2EDuration="6.855088545s" podCreationTimestamp="2025-12-08 21:44:50 +0000 UTC" firstStartedPulling="2025-12-08 21:44:51.843860752 +0000 UTC m=+1568.542619097" lastFinishedPulling="2025-12-08 21:44:56.249360723 +0000 UTC m=+1572.948119068" observedRunningTime="2025-12-08 21:44:56.851305402 +0000 UTC m=+1573.550063747" watchObservedRunningTime="2025-12-08 21:44:56.855088545 +0000 UTC m=+1573.553846900" Dec 08 
21:44:56 crc kubenswrapper[4791]: I1208 21:44:56.892955 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.068434675 podStartE2EDuration="6.892930126s" podCreationTimestamp="2025-12-08 21:44:50 +0000 UTC" firstStartedPulling="2025-12-08 21:44:51.425062257 +0000 UTC m=+1568.123820602" lastFinishedPulling="2025-12-08 21:44:56.249557708 +0000 UTC m=+1572.948316053" observedRunningTime="2025-12-08 21:44:56.872556105 +0000 UTC m=+1573.571314450" watchObservedRunningTime="2025-12-08 21:44:56.892930126 +0000 UTC m=+1573.591688471" Dec 08 21:44:56 crc kubenswrapper[4791]: I1208 21:44:56.897270 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-wkrbl" podStartSLOduration=4.897252972 podStartE2EDuration="4.897252972s" podCreationTimestamp="2025-12-08 21:44:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:44:56.890114446 +0000 UTC m=+1573.588872801" watchObservedRunningTime="2025-12-08 21:44:56.897252972 +0000 UTC m=+1573.596011317" Dec 08 21:44:57 crc kubenswrapper[4791]: I1208 21:44:57.870079 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e204b013-c633-4311-a89a-0547f8cfc6e3","Type":"ContainerStarted","Data":"a1fe9235dcf4d1679495f5659b82e903935c2aba259f15c6a38c653ec330afa7"} Dec 08 21:44:57 crc kubenswrapper[4791]: I1208 21:44:57.876977 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf" containerName="nova-metadata-log" containerID="cri-o://1fe9c673f3c319638927d4376edecab9322cd2a0bde72f6d13df07b4fa78812c" gracePeriod=30 Dec 08 21:44:57 crc kubenswrapper[4791]: I1208 21:44:57.877410 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf","Type":"ContainerStarted","Data":"7579fd88e0456d06c9171d43af5e734da3555b8df3ef0772f46b09fcb96aaacc"} Dec 08 21:44:57 crc kubenswrapper[4791]: I1208 21:44:57.877822 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf" containerName="nova-metadata-metadata" containerID="cri-o://7579fd88e0456d06c9171d43af5e734da3555b8df3ef0772f46b09fcb96aaacc" gracePeriod=30 Dec 08 21:44:57 crc kubenswrapper[4791]: I1208 21:44:57.915223 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.825520387 podStartE2EDuration="8.915197898s" podCreationTimestamp="2025-12-08 21:44:49 +0000 UTC" firstStartedPulling="2025-12-08 21:44:51.159878917 +0000 UTC m=+1567.858637262" lastFinishedPulling="2025-12-08 21:44:56.249556428 +0000 UTC m=+1572.948314773" observedRunningTime="2025-12-08 21:44:57.88717942 +0000 UTC m=+1574.585937765" watchObservedRunningTime="2025-12-08 21:44:57.915197898 +0000 UTC m=+1574.613956243" Dec 08 21:44:57 crc kubenswrapper[4791]: I1208 21:44:57.938558 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.758930625 podStartE2EDuration="7.938529862s" podCreationTimestamp="2025-12-08 21:44:50 +0000 UTC" firstStartedPulling="2025-12-08 21:44:52.072407051 +0000 UTC m=+1568.771165396" lastFinishedPulling="2025-12-08 21:44:56.252006288 +0000 UTC m=+1572.950764633" 
observedRunningTime="2025-12-08 21:44:57.910631446 +0000 UTC m=+1574.609389791" watchObservedRunningTime="2025-12-08 21:44:57.938529862 +0000 UTC m=+1574.637288207" Dec 08 21:44:58 crc kubenswrapper[4791]: I1208 21:44:58.578522 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 08 21:44:58 crc kubenswrapper[4791]: I1208 21:44:58.637683 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mkpcx\" (UniqueName: \"kubernetes.io/projected/d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf-kube-api-access-mkpcx\") pod \"d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf\" (UID: \"d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf\") " Dec 08 21:44:58 crc kubenswrapper[4791]: I1208 21:44:58.638179 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf-config-data\") pod \"d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf\" (UID: \"d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf\") " Dec 08 21:44:58 crc kubenswrapper[4791]: I1208 21:44:58.638436 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf-combined-ca-bundle\") pod \"d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf\" (UID: \"d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf\") " Dec 08 21:44:58 crc kubenswrapper[4791]: I1208 21:44:58.638574 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf-logs\") pod \"d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf\" (UID: \"d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf\") " Dec 08 21:44:58 crc kubenswrapper[4791]: I1208 21:44:58.644782 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf-logs" (OuterVolumeSpecName: "logs") pod "d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf" (UID: "d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:44:58 crc kubenswrapper[4791]: I1208 21:44:58.659733 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf-kube-api-access-mkpcx" (OuterVolumeSpecName: "kube-api-access-mkpcx") pod "d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf" (UID: "d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf"). InnerVolumeSpecName "kube-api-access-mkpcx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:44:58 crc kubenswrapper[4791]: I1208 21:44:58.680655 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf" (UID: "d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:44:58 crc kubenswrapper[4791]: I1208 21:44:58.681672 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf-config-data" (OuterVolumeSpecName: "config-data") pod "d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf" (UID: "d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:44:58 crc kubenswrapper[4791]: I1208 21:44:58.742915 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mkpcx\" (UniqueName: \"kubernetes.io/projected/d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf-kube-api-access-mkpcx\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:58 crc kubenswrapper[4791]: I1208 21:44:58.742953 4791 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:58 crc kubenswrapper[4791]: I1208 21:44:58.742963 4791 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:58 crc kubenswrapper[4791]: I1208 21:44:58.742973 4791 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf-logs\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:58 crc kubenswrapper[4791]: I1208 21:44:58.890261 4791 generic.go:334] "Generic (PLEG): container finished" podID="d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf" containerID="7579fd88e0456d06c9171d43af5e734da3555b8df3ef0772f46b09fcb96aaacc" exitCode=0 Dec 08 21:44:58 crc kubenswrapper[4791]: I1208 21:44:58.891892 4791 generic.go:334] "Generic (PLEG): container finished" podID="d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf" containerID="1fe9c673f3c319638927d4376edecab9322cd2a0bde72f6d13df07b4fa78812c" exitCode=143 Dec 08 21:44:58 crc kubenswrapper[4791]: I1208 21:44:58.893299 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 08 21:44:58 crc kubenswrapper[4791]: I1208 21:44:58.893579 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf","Type":"ContainerDied","Data":"7579fd88e0456d06c9171d43af5e734da3555b8df3ef0772f46b09fcb96aaacc"} Dec 08 21:44:58 crc kubenswrapper[4791]: I1208 21:44:58.893763 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf","Type":"ContainerDied","Data":"1fe9c673f3c319638927d4376edecab9322cd2a0bde72f6d13df07b4fa78812c"} Dec 08 21:44:58 crc kubenswrapper[4791]: I1208 21:44:58.893793 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf","Type":"ContainerDied","Data":"fddb222e4123d7c3bb3dffad40b9fa187aabd3cc151a6c623a1dbecd21137bb1"} Dec 08 21:44:58 crc kubenswrapper[4791]: I1208 21:44:58.893830 4791 scope.go:117] "RemoveContainer" containerID="7579fd88e0456d06c9171d43af5e734da3555b8df3ef0772f46b09fcb96aaacc" Dec 08 21:44:58 crc kubenswrapper[4791]: I1208 21:44:58.951040 4791 scope.go:117] "RemoveContainer" containerID="1fe9c673f3c319638927d4376edecab9322cd2a0bde72f6d13df07b4fa78812c" Dec 08 21:44:58 crc kubenswrapper[4791]: I1208 21:44:58.970881 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 08 21:44:58 crc kubenswrapper[4791]: I1208 21:44:58.979657 4791 scope.go:117] "RemoveContainer" containerID="7579fd88e0456d06c9171d43af5e734da3555b8df3ef0772f46b09fcb96aaacc" Dec 08 21:44:58 crc kubenswrapper[4791]: E1208 21:44:58.980391 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = 
could not find container \"7579fd88e0456d06c9171d43af5e734da3555b8df3ef0772f46b09fcb96aaacc\": container with ID starting with 7579fd88e0456d06c9171d43af5e734da3555b8df3ef0772f46b09fcb96aaacc not found: ID does not exist" containerID="7579fd88e0456d06c9171d43af5e734da3555b8df3ef0772f46b09fcb96aaacc" Dec 08 21:44:58 crc kubenswrapper[4791]: I1208 21:44:58.980427 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7579fd88e0456d06c9171d43af5e734da3555b8df3ef0772f46b09fcb96aaacc"} err="failed to get container status \"7579fd88e0456d06c9171d43af5e734da3555b8df3ef0772f46b09fcb96aaacc\": rpc error: code = NotFound desc = could not find container \"7579fd88e0456d06c9171d43af5e734da3555b8df3ef0772f46b09fcb96aaacc\": container with ID starting with 7579fd88e0456d06c9171d43af5e734da3555b8df3ef0772f46b09fcb96aaacc not found: ID does not exist" Dec 08 21:44:58 crc kubenswrapper[4791]: I1208 21:44:58.980456 4791 scope.go:117] "RemoveContainer" containerID="1fe9c673f3c319638927d4376edecab9322cd2a0bde72f6d13df07b4fa78812c" Dec 08 21:44:58 crc kubenswrapper[4791]: E1208 21:44:58.980897 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1fe9c673f3c319638927d4376edecab9322cd2a0bde72f6d13df07b4fa78812c\": container with ID starting with 1fe9c673f3c319638927d4376edecab9322cd2a0bde72f6d13df07b4fa78812c not found: ID does not exist" containerID="1fe9c673f3c319638927d4376edecab9322cd2a0bde72f6d13df07b4fa78812c" Dec 08 21:44:58 crc kubenswrapper[4791]: I1208 21:44:58.980925 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1fe9c673f3c319638927d4376edecab9322cd2a0bde72f6d13df07b4fa78812c"} err="failed to get container status \"1fe9c673f3c319638927d4376edecab9322cd2a0bde72f6d13df07b4fa78812c\": rpc error: code = NotFound desc = could not find container \"1fe9c673f3c319638927d4376edecab9322cd2a0bde72f6d13df07b4fa78812c\": container with ID starting with 1fe9c673f3c319638927d4376edecab9322cd2a0bde72f6d13df07b4fa78812c not found: ID does not exist" Dec 08 21:44:58 crc kubenswrapper[4791]: I1208 21:44:58.980940 4791 scope.go:117] "RemoveContainer" containerID="7579fd88e0456d06c9171d43af5e734da3555b8df3ef0772f46b09fcb96aaacc" Dec 08 21:44:58 crc kubenswrapper[4791]: I1208 21:44:58.981293 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7579fd88e0456d06c9171d43af5e734da3555b8df3ef0772f46b09fcb96aaacc"} err="failed to get container status \"7579fd88e0456d06c9171d43af5e734da3555b8df3ef0772f46b09fcb96aaacc\": rpc error: code = NotFound desc = could not find container \"7579fd88e0456d06c9171d43af5e734da3555b8df3ef0772f46b09fcb96aaacc\": container with ID starting with 7579fd88e0456d06c9171d43af5e734da3555b8df3ef0772f46b09fcb96aaacc not found: ID does not exist" Dec 08 21:44:58 crc kubenswrapper[4791]: I1208 21:44:58.981325 4791 scope.go:117] "RemoveContainer" containerID="1fe9c673f3c319638927d4376edecab9322cd2a0bde72f6d13df07b4fa78812c" Dec 08 21:44:58 crc kubenswrapper[4791]: I1208 21:44:58.982351 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1fe9c673f3c319638927d4376edecab9322cd2a0bde72f6d13df07b4fa78812c"} err="failed to get container status \"1fe9c673f3c319638927d4376edecab9322cd2a0bde72f6d13df07b4fa78812c\": rpc error: code = NotFound desc = could not find container \"1fe9c673f3c319638927d4376edecab9322cd2a0bde72f6d13df07b4fa78812c\": 
container with ID starting with 1fe9c673f3c319638927d4376edecab9322cd2a0bde72f6d13df07b4fa78812c not found: ID does not exist" Dec 08 21:44:59 crc kubenswrapper[4791]: I1208 21:44:59.014776 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 08 21:44:59 crc kubenswrapper[4791]: I1208 21:44:59.028602 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 08 21:44:59 crc kubenswrapper[4791]: E1208 21:44:59.029671 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf" containerName="nova-metadata-metadata" Dec 08 21:44:59 crc kubenswrapper[4791]: I1208 21:44:59.029700 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf" containerName="nova-metadata-metadata" Dec 08 21:44:59 crc kubenswrapper[4791]: E1208 21:44:59.029773 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf" containerName="nova-metadata-log" Dec 08 21:44:59 crc kubenswrapper[4791]: I1208 21:44:59.029783 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf" containerName="nova-metadata-log" Dec 08 21:44:59 crc kubenswrapper[4791]: I1208 21:44:59.030201 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf" containerName="nova-metadata-log" Dec 08 21:44:59 crc kubenswrapper[4791]: I1208 21:44:59.030244 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf" containerName="nova-metadata-metadata" Dec 08 21:44:59 crc kubenswrapper[4791]: I1208 21:44:59.046824 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 08 21:44:59 crc kubenswrapper[4791]: I1208 21:44:59.047007 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 08 21:44:59 crc kubenswrapper[4791]: I1208 21:44:59.050372 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 08 21:44:59 crc kubenswrapper[4791]: I1208 21:44:59.056515 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 08 21:44:59 crc kubenswrapper[4791]: I1208 21:44:59.155811 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nrqp6\" (UniqueName: \"kubernetes.io/projected/c0ef3022-5928-4df9-a234-ceb460749df4-kube-api-access-nrqp6\") pod \"nova-metadata-0\" (UID: \"c0ef3022-5928-4df9-a234-ceb460749df4\") " pod="openstack/nova-metadata-0" Dec 08 21:44:59 crc kubenswrapper[4791]: I1208 21:44:59.156188 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0ef3022-5928-4df9-a234-ceb460749df4-config-data\") pod \"nova-metadata-0\" (UID: \"c0ef3022-5928-4df9-a234-ceb460749df4\") " pod="openstack/nova-metadata-0" Dec 08 21:44:59 crc kubenswrapper[4791]: I1208 21:44:59.156296 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0ef3022-5928-4df9-a234-ceb460749df4-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c0ef3022-5928-4df9-a234-ceb460749df4\") " pod="openstack/nova-metadata-0" Dec 08 21:44:59 crc kubenswrapper[4791]: I1208 21:44:59.156415 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0ef3022-5928-4df9-a234-ceb460749df4-logs\") pod \"nova-metadata-0\" (UID: \"c0ef3022-5928-4df9-a234-ceb460749df4\") " pod="openstack/nova-metadata-0" Dec 08 21:44:59 crc kubenswrapper[4791]: I1208 21:44:59.156620 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0ef3022-5928-4df9-a234-ceb460749df4-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c0ef3022-5928-4df9-a234-ceb460749df4\") " pod="openstack/nova-metadata-0" Dec 08 21:44:59 crc kubenswrapper[4791]: I1208 21:44:59.260463 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0ef3022-5928-4df9-a234-ceb460749df4-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c0ef3022-5928-4df9-a234-ceb460749df4\") " pod="openstack/nova-metadata-0" Dec 08 21:44:59 crc kubenswrapper[4791]: I1208 21:44:59.260598 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nrqp6\" (UniqueName: \"kubernetes.io/projected/c0ef3022-5928-4df9-a234-ceb460749df4-kube-api-access-nrqp6\") pod \"nova-metadata-0\" (UID: \"c0ef3022-5928-4df9-a234-ceb460749df4\") " pod="openstack/nova-metadata-0" Dec 08 21:44:59 crc kubenswrapper[4791]: I1208 21:44:59.260633 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0ef3022-5928-4df9-a234-ceb460749df4-config-data\") pod \"nova-metadata-0\" (UID: \"c0ef3022-5928-4df9-a234-ceb460749df4\") " pod="openstack/nova-metadata-0" Dec 08 21:44:59 crc kubenswrapper[4791]: I1208 21:44:59.260663 4791 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0ef3022-5928-4df9-a234-ceb460749df4-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c0ef3022-5928-4df9-a234-ceb460749df4\") " pod="openstack/nova-metadata-0" Dec 08 21:44:59 crc kubenswrapper[4791]: I1208 21:44:59.260737 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0ef3022-5928-4df9-a234-ceb460749df4-logs\") pod \"nova-metadata-0\" (UID: \"c0ef3022-5928-4df9-a234-ceb460749df4\") " pod="openstack/nova-metadata-0" Dec 08 21:44:59 crc kubenswrapper[4791]: I1208 21:44:59.261236 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0ef3022-5928-4df9-a234-ceb460749df4-logs\") pod \"nova-metadata-0\" (UID: \"c0ef3022-5928-4df9-a234-ceb460749df4\") " pod="openstack/nova-metadata-0" Dec 08 21:44:59 crc kubenswrapper[4791]: I1208 21:44:59.267303 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0ef3022-5928-4df9-a234-ceb460749df4-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c0ef3022-5928-4df9-a234-ceb460749df4\") " pod="openstack/nova-metadata-0" Dec 08 21:44:59 crc kubenswrapper[4791]: I1208 21:44:59.272318 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0ef3022-5928-4df9-a234-ceb460749df4-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c0ef3022-5928-4df9-a234-ceb460749df4\") " pod="openstack/nova-metadata-0" Dec 08 21:44:59 crc kubenswrapper[4791]: I1208 21:44:59.273033 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0ef3022-5928-4df9-a234-ceb460749df4-config-data\") pod \"nova-metadata-0\" (UID: \"c0ef3022-5928-4df9-a234-ceb460749df4\") " pod="openstack/nova-metadata-0" Dec 08 21:44:59 crc kubenswrapper[4791]: I1208 21:44:59.281210 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nrqp6\" (UniqueName: \"kubernetes.io/projected/c0ef3022-5928-4df9-a234-ceb460749df4-kube-api-access-nrqp6\") pod \"nova-metadata-0\" (UID: \"c0ef3022-5928-4df9-a234-ceb460749df4\") " pod="openstack/nova-metadata-0" Dec 08 21:44:59 crc kubenswrapper[4791]: I1208 21:44:59.401659 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 08 21:44:59 crc kubenswrapper[4791]: I1208 21:44:59.618076 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf" path="/var/lib/kubelet/pods/d5ac6c99-6f2e-4fcb-b9b1-50b8c2b4a5cf/volumes" Dec 08 21:44:59 crc kubenswrapper[4791]: I1208 21:44:59.934451 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 08 21:45:00 crc kubenswrapper[4791]: I1208 21:45:00.144943 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420505-ztjvz"] Dec 08 21:45:00 crc kubenswrapper[4791]: I1208 21:45:00.147928 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420505-ztjvz" Dec 08 21:45:00 crc kubenswrapper[4791]: I1208 21:45:00.150943 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 08 21:45:00 crc kubenswrapper[4791]: I1208 21:45:00.151156 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 08 21:45:00 crc kubenswrapper[4791]: I1208 21:45:00.166075 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420505-ztjvz"] Dec 08 21:45:00 crc kubenswrapper[4791]: I1208 21:45:00.196080 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cbd46a46-01ac-4b1c-b0b8-8f5cda691535-config-volume\") pod \"collect-profiles-29420505-ztjvz\" (UID: \"cbd46a46-01ac-4b1c-b0b8-8f5cda691535\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420505-ztjvz" Dec 08 21:45:00 crc kubenswrapper[4791]: I1208 21:45:00.196152 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k46jt\" (UniqueName: \"kubernetes.io/projected/cbd46a46-01ac-4b1c-b0b8-8f5cda691535-kube-api-access-k46jt\") pod \"collect-profiles-29420505-ztjvz\" (UID: \"cbd46a46-01ac-4b1c-b0b8-8f5cda691535\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420505-ztjvz" Dec 08 21:45:00 crc kubenswrapper[4791]: I1208 21:45:00.196370 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cbd46a46-01ac-4b1c-b0b8-8f5cda691535-secret-volume\") pod \"collect-profiles-29420505-ztjvz\" (UID: \"cbd46a46-01ac-4b1c-b0b8-8f5cda691535\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420505-ztjvz" Dec 08 21:45:00 crc kubenswrapper[4791]: I1208 21:45:00.302500 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cbd46a46-01ac-4b1c-b0b8-8f5cda691535-secret-volume\") pod \"collect-profiles-29420505-ztjvz\" (UID: \"cbd46a46-01ac-4b1c-b0b8-8f5cda691535\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420505-ztjvz" Dec 08 21:45:00 crc kubenswrapper[4791]: I1208 21:45:00.303841 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cbd46a46-01ac-4b1c-b0b8-8f5cda691535-config-volume\") pod \"collect-profiles-29420505-ztjvz\" (UID: \"cbd46a46-01ac-4b1c-b0b8-8f5cda691535\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420505-ztjvz" Dec 08 21:45:00 crc kubenswrapper[4791]: I1208 21:45:00.304053 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k46jt\" (UniqueName: \"kubernetes.io/projected/cbd46a46-01ac-4b1c-b0b8-8f5cda691535-kube-api-access-k46jt\") pod \"collect-profiles-29420505-ztjvz\" (UID: \"cbd46a46-01ac-4b1c-b0b8-8f5cda691535\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420505-ztjvz" Dec 08 21:45:00 crc kubenswrapper[4791]: I1208 21:45:00.306500 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cbd46a46-01ac-4b1c-b0b8-8f5cda691535-config-volume\") pod 
\"collect-profiles-29420505-ztjvz\" (UID: \"cbd46a46-01ac-4b1c-b0b8-8f5cda691535\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420505-ztjvz" Dec 08 21:45:00 crc kubenswrapper[4791]: I1208 21:45:00.307766 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cbd46a46-01ac-4b1c-b0b8-8f5cda691535-secret-volume\") pod \"collect-profiles-29420505-ztjvz\" (UID: \"cbd46a46-01ac-4b1c-b0b8-8f5cda691535\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420505-ztjvz" Dec 08 21:45:00 crc kubenswrapper[4791]: I1208 21:45:00.319130 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 08 21:45:00 crc kubenswrapper[4791]: I1208 21:45:00.319191 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 08 21:45:00 crc kubenswrapper[4791]: I1208 21:45:00.322973 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k46jt\" (UniqueName: \"kubernetes.io/projected/cbd46a46-01ac-4b1c-b0b8-8f5cda691535-kube-api-access-k46jt\") pod \"collect-profiles-29420505-ztjvz\" (UID: \"cbd46a46-01ac-4b1c-b0b8-8f5cda691535\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420505-ztjvz" Dec 08 21:45:00 crc kubenswrapper[4791]: I1208 21:45:00.502320 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420505-ztjvz" Dec 08 21:45:00 crc kubenswrapper[4791]: I1208 21:45:00.715921 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 08 21:45:00 crc kubenswrapper[4791]: I1208 21:45:00.716292 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 08 21:45:00 crc kubenswrapper[4791]: I1208 21:45:00.748015 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 08 21:45:00 crc kubenswrapper[4791]: I1208 21:45:00.762053 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:45:00 crc kubenswrapper[4791]: I1208 21:45:00.849586 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-568d7fd7cf-6nlb8" Dec 08 21:45:00 crc kubenswrapper[4791]: I1208 21:45:00.925662 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c0ef3022-5928-4df9-a234-ceb460749df4","Type":"ContainerStarted","Data":"639284ecb43f4e60a86986ed31637b1fcc6c568e207600299345ef95c2c5c734"} Dec 08 21:45:00 crc kubenswrapper[4791]: I1208 21:45:00.925751 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c0ef3022-5928-4df9-a234-ceb460749df4","Type":"ContainerStarted","Data":"92323532d474a1ce626af0463cdeaaffc018da266e1013824b2b86cac03642a6"} Dec 08 21:45:00 crc kubenswrapper[4791]: I1208 21:45:00.925766 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c0ef3022-5928-4df9-a234-ceb460749df4","Type":"ContainerStarted","Data":"b0367986cb9fda088623378198cdcd6590caedcfa24bfacf53c10dde633419c3"} Dec 08 21:45:00 crc kubenswrapper[4791]: I1208 21:45:00.932530 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-688b9f5b49-7hsq9"] Dec 08 21:45:00 crc kubenswrapper[4791]: I1208 21:45:00.933196 4791 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-688b9f5b49-7hsq9" podUID="72a6c619-8a0d-4a9f-b68f-f316cf96202d" containerName="dnsmasq-dns" containerID="cri-o://37ff669fa652230a76a974e01feb43540427b1a2e6e0edc36285a532a02e2a55" gracePeriod=10 Dec 08 21:45:00 crc kubenswrapper[4791]: I1208 21:45:00.936269 4791 generic.go:334] "Generic (PLEG): container finished" podID="f5bdb996-544f-412b-8279-09b9e0bc2510" containerID="8e3dbe569a4ad5f2c161b5771769cc3381bf0ca2f72a94a5aee50f86d778ade9" exitCode=0 Dec 08 21:45:00 crc kubenswrapper[4791]: I1208 21:45:00.937354 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-6tl54" event={"ID":"f5bdb996-544f-412b-8279-09b9e0bc2510","Type":"ContainerDied","Data":"8e3dbe569a4ad5f2c161b5771769cc3381bf0ca2f72a94a5aee50f86d778ade9"} Dec 08 21:45:00 crc kubenswrapper[4791]: I1208 21:45:00.951451 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.9514304239999998 podStartE2EDuration="2.951430424s" podCreationTimestamp="2025-12-08 21:44:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:45:00.949485507 +0000 UTC m=+1577.648243852" watchObservedRunningTime="2025-12-08 21:45:00.951430424 +0000 UTC m=+1577.650188769" Dec 08 21:45:01 crc kubenswrapper[4791]: I1208 21:45:01.002990 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 08 21:45:01 crc kubenswrapper[4791]: I1208 21:45:01.141268 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420505-ztjvz"] Dec 08 21:45:01 crc kubenswrapper[4791]: I1208 21:45:01.406293 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="e204b013-c633-4311-a89a-0547f8cfc6e3" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.216:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 08 21:45:01 crc kubenswrapper[4791]: I1208 21:45:01.407060 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="e204b013-c633-4311-a89a-0547f8cfc6e3" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.216:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 08 21:45:01 crc kubenswrapper[4791]: I1208 21:45:01.718506 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-688b9f5b49-7hsq9" Dec 08 21:45:01 crc kubenswrapper[4791]: I1208 21:45:01.780390 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72a6c619-8a0d-4a9f-b68f-f316cf96202d-config\") pod \"72a6c619-8a0d-4a9f-b68f-f316cf96202d\" (UID: \"72a6c619-8a0d-4a9f-b68f-f316cf96202d\") " Dec 08 21:45:01 crc kubenswrapper[4791]: I1208 21:45:01.782548 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/72a6c619-8a0d-4a9f-b68f-f316cf96202d-dns-svc\") pod \"72a6c619-8a0d-4a9f-b68f-f316cf96202d\" (UID: \"72a6c619-8a0d-4a9f-b68f-f316cf96202d\") " Dec 08 21:45:01 crc kubenswrapper[4791]: I1208 21:45:01.783296 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/72a6c619-8a0d-4a9f-b68f-f316cf96202d-dns-swift-storage-0\") pod \"72a6c619-8a0d-4a9f-b68f-f316cf96202d\" (UID: \"72a6c619-8a0d-4a9f-b68f-f316cf96202d\") " Dec 08 21:45:01 crc kubenswrapper[4791]: I1208 21:45:01.783561 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dmtwd\" (UniqueName: \"kubernetes.io/projected/72a6c619-8a0d-4a9f-b68f-f316cf96202d-kube-api-access-dmtwd\") pod \"72a6c619-8a0d-4a9f-b68f-f316cf96202d\" (UID: \"72a6c619-8a0d-4a9f-b68f-f316cf96202d\") " Dec 08 21:45:01 crc kubenswrapper[4791]: I1208 21:45:01.783751 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/72a6c619-8a0d-4a9f-b68f-f316cf96202d-ovsdbserver-sb\") pod \"72a6c619-8a0d-4a9f-b68f-f316cf96202d\" (UID: \"72a6c619-8a0d-4a9f-b68f-f316cf96202d\") " Dec 08 21:45:01 crc kubenswrapper[4791]: I1208 21:45:01.783913 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/72a6c619-8a0d-4a9f-b68f-f316cf96202d-ovsdbserver-nb\") pod \"72a6c619-8a0d-4a9f-b68f-f316cf96202d\" (UID: \"72a6c619-8a0d-4a9f-b68f-f316cf96202d\") " Dec 08 21:45:01 crc kubenswrapper[4791]: I1208 21:45:01.790596 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72a6c619-8a0d-4a9f-b68f-f316cf96202d-kube-api-access-dmtwd" (OuterVolumeSpecName: "kube-api-access-dmtwd") pod "72a6c619-8a0d-4a9f-b68f-f316cf96202d" (UID: "72a6c619-8a0d-4a9f-b68f-f316cf96202d"). InnerVolumeSpecName "kube-api-access-dmtwd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:45:01 crc kubenswrapper[4791]: I1208 21:45:01.849541 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72a6c619-8a0d-4a9f-b68f-f316cf96202d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "72a6c619-8a0d-4a9f-b68f-f316cf96202d" (UID: "72a6c619-8a0d-4a9f-b68f-f316cf96202d"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:45:01 crc kubenswrapper[4791]: I1208 21:45:01.871659 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72a6c619-8a0d-4a9f-b68f-f316cf96202d-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "72a6c619-8a0d-4a9f-b68f-f316cf96202d" (UID: "72a6c619-8a0d-4a9f-b68f-f316cf96202d"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:45:01 crc kubenswrapper[4791]: I1208 21:45:01.878848 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72a6c619-8a0d-4a9f-b68f-f316cf96202d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "72a6c619-8a0d-4a9f-b68f-f316cf96202d" (UID: "72a6c619-8a0d-4a9f-b68f-f316cf96202d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:45:01 crc kubenswrapper[4791]: I1208 21:45:01.884339 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72a6c619-8a0d-4a9f-b68f-f316cf96202d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "72a6c619-8a0d-4a9f-b68f-f316cf96202d" (UID: "72a6c619-8a0d-4a9f-b68f-f316cf96202d"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:45:01 crc kubenswrapper[4791]: I1208 21:45:01.887664 4791 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/72a6c619-8a0d-4a9f-b68f-f316cf96202d-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:01 crc kubenswrapper[4791]: I1208 21:45:01.887691 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dmtwd\" (UniqueName: \"kubernetes.io/projected/72a6c619-8a0d-4a9f-b68f-f316cf96202d-kube-api-access-dmtwd\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:01 crc kubenswrapper[4791]: I1208 21:45:01.887703 4791 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/72a6c619-8a0d-4a9f-b68f-f316cf96202d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:01 crc kubenswrapper[4791]: I1208 21:45:01.887739 4791 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/72a6c619-8a0d-4a9f-b68f-f316cf96202d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:01 crc kubenswrapper[4791]: I1208 21:45:01.887751 4791 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/72a6c619-8a0d-4a9f-b68f-f316cf96202d-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:01 crc kubenswrapper[4791]: I1208 21:45:01.891448 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72a6c619-8a0d-4a9f-b68f-f316cf96202d-config" (OuterVolumeSpecName: "config") pod "72a6c619-8a0d-4a9f-b68f-f316cf96202d" (UID: "72a6c619-8a0d-4a9f-b68f-f316cf96202d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:45:01 crc kubenswrapper[4791]: I1208 21:45:01.946779 4791 generic.go:334] "Generic (PLEG): container finished" podID="72a6c619-8a0d-4a9f-b68f-f316cf96202d" containerID="37ff669fa652230a76a974e01feb43540427b1a2e6e0edc36285a532a02e2a55" exitCode=0 Dec 08 21:45:01 crc kubenswrapper[4791]: I1208 21:45:01.946848 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688b9f5b49-7hsq9" event={"ID":"72a6c619-8a0d-4a9f-b68f-f316cf96202d","Type":"ContainerDied","Data":"37ff669fa652230a76a974e01feb43540427b1a2e6e0edc36285a532a02e2a55"} Dec 08 21:45:01 crc kubenswrapper[4791]: I1208 21:45:01.946879 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688b9f5b49-7hsq9" event={"ID":"72a6c619-8a0d-4a9f-b68f-f316cf96202d","Type":"ContainerDied","Data":"f8fb3651cb8f13310d7cb88f6a275c7e28fd7741339f02165847142dc72c8fb2"} Dec 08 21:45:01 crc kubenswrapper[4791]: I1208 21:45:01.946908 4791 scope.go:117] "RemoveContainer" containerID="37ff669fa652230a76a974e01feb43540427b1a2e6e0edc36285a532a02e2a55" Dec 08 21:45:01 crc kubenswrapper[4791]: I1208 21:45:01.947048 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-688b9f5b49-7hsq9" Dec 08 21:45:01 crc kubenswrapper[4791]: I1208 21:45:01.954240 4791 generic.go:334] "Generic (PLEG): container finished" podID="cbd46a46-01ac-4b1c-b0b8-8f5cda691535" containerID="817238babfa892405415f9c41dc201222440e7b87a670dd3ea6cd71bdb92f767" exitCode=0 Dec 08 21:45:01 crc kubenswrapper[4791]: I1208 21:45:01.954829 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29420505-ztjvz" event={"ID":"cbd46a46-01ac-4b1c-b0b8-8f5cda691535","Type":"ContainerDied","Data":"817238babfa892405415f9c41dc201222440e7b87a670dd3ea6cd71bdb92f767"} Dec 08 21:45:01 crc kubenswrapper[4791]: I1208 21:45:01.954861 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29420505-ztjvz" event={"ID":"cbd46a46-01ac-4b1c-b0b8-8f5cda691535","Type":"ContainerStarted","Data":"1a0f34f0fc1dbba5ac98d529627f540a6c7112c4b5da20e5fb3c546bbcf56a9f"} Dec 08 21:45:01 crc kubenswrapper[4791]: I1208 21:45:01.987719 4791 scope.go:117] "RemoveContainer" containerID="f688d6fd5d52a10c3fa3d42360c6fb4a9919e1607e1b1d29c6224f5e3a693732" Dec 08 21:45:01 crc kubenswrapper[4791]: I1208 21:45:01.989101 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72a6c619-8a0d-4a9f-b68f-f316cf96202d-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:02 crc kubenswrapper[4791]: I1208 21:45:02.029788 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-688b9f5b49-7hsq9"] Dec 08 21:45:02 crc kubenswrapper[4791]: I1208 21:45:02.042796 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-688b9f5b49-7hsq9"] Dec 08 21:45:02 crc kubenswrapper[4791]: I1208 21:45:02.391882 4791 scope.go:117] "RemoveContainer" containerID="37ff669fa652230a76a974e01feb43540427b1a2e6e0edc36285a532a02e2a55" Dec 08 21:45:02 crc kubenswrapper[4791]: E1208 21:45:02.392667 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37ff669fa652230a76a974e01feb43540427b1a2e6e0edc36285a532a02e2a55\": container with ID starting with 37ff669fa652230a76a974e01feb43540427b1a2e6e0edc36285a532a02e2a55 not found: 
ID does not exist" containerID="37ff669fa652230a76a974e01feb43540427b1a2e6e0edc36285a532a02e2a55" Dec 08 21:45:02 crc kubenswrapper[4791]: I1208 21:45:02.392740 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37ff669fa652230a76a974e01feb43540427b1a2e6e0edc36285a532a02e2a55"} err="failed to get container status \"37ff669fa652230a76a974e01feb43540427b1a2e6e0edc36285a532a02e2a55\": rpc error: code = NotFound desc = could not find container \"37ff669fa652230a76a974e01feb43540427b1a2e6e0edc36285a532a02e2a55\": container with ID starting with 37ff669fa652230a76a974e01feb43540427b1a2e6e0edc36285a532a02e2a55 not found: ID does not exist" Dec 08 21:45:02 crc kubenswrapper[4791]: I1208 21:45:02.392794 4791 scope.go:117] "RemoveContainer" containerID="f688d6fd5d52a10c3fa3d42360c6fb4a9919e1607e1b1d29c6224f5e3a693732" Dec 08 21:45:02 crc kubenswrapper[4791]: E1208 21:45:02.393145 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f688d6fd5d52a10c3fa3d42360c6fb4a9919e1607e1b1d29c6224f5e3a693732\": container with ID starting with f688d6fd5d52a10c3fa3d42360c6fb4a9919e1607e1b1d29c6224f5e3a693732 not found: ID does not exist" containerID="f688d6fd5d52a10c3fa3d42360c6fb4a9919e1607e1b1d29c6224f5e3a693732" Dec 08 21:45:02 crc kubenswrapper[4791]: I1208 21:45:02.393180 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f688d6fd5d52a10c3fa3d42360c6fb4a9919e1607e1b1d29c6224f5e3a693732"} err="failed to get container status \"f688d6fd5d52a10c3fa3d42360c6fb4a9919e1607e1b1d29c6224f5e3a693732\": rpc error: code = NotFound desc = could not find container \"f688d6fd5d52a10c3fa3d42360c6fb4a9919e1607e1b1d29c6224f5e3a693732\": container with ID starting with f688d6fd5d52a10c3fa3d42360c6fb4a9919e1607e1b1d29c6224f5e3a693732 not found: ID does not exist" Dec 08 21:45:02 crc kubenswrapper[4791]: I1208 21:45:02.523386 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-6tl54" Dec 08 21:45:02 crc kubenswrapper[4791]: I1208 21:45:02.702426 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5bdb996-544f-412b-8279-09b9e0bc2510-config-data\") pod \"f5bdb996-544f-412b-8279-09b9e0bc2510\" (UID: \"f5bdb996-544f-412b-8279-09b9e0bc2510\") " Dec 08 21:45:02 crc kubenswrapper[4791]: I1208 21:45:02.702604 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zqpr6\" (UniqueName: \"kubernetes.io/projected/f5bdb996-544f-412b-8279-09b9e0bc2510-kube-api-access-zqpr6\") pod \"f5bdb996-544f-412b-8279-09b9e0bc2510\" (UID: \"f5bdb996-544f-412b-8279-09b9e0bc2510\") " Dec 08 21:45:02 crc kubenswrapper[4791]: I1208 21:45:02.702806 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5bdb996-544f-412b-8279-09b9e0bc2510-combined-ca-bundle\") pod \"f5bdb996-544f-412b-8279-09b9e0bc2510\" (UID: \"f5bdb996-544f-412b-8279-09b9e0bc2510\") " Dec 08 21:45:02 crc kubenswrapper[4791]: I1208 21:45:02.702917 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f5bdb996-544f-412b-8279-09b9e0bc2510-scripts\") pod \"f5bdb996-544f-412b-8279-09b9e0bc2510\" (UID: \"f5bdb996-544f-412b-8279-09b9e0bc2510\") " Dec 08 21:45:02 crc kubenswrapper[4791]: I1208 21:45:02.707075 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5bdb996-544f-412b-8279-09b9e0bc2510-scripts" (OuterVolumeSpecName: "scripts") pod "f5bdb996-544f-412b-8279-09b9e0bc2510" (UID: "f5bdb996-544f-412b-8279-09b9e0bc2510"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:45:02 crc kubenswrapper[4791]: I1208 21:45:02.707128 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5bdb996-544f-412b-8279-09b9e0bc2510-kube-api-access-zqpr6" (OuterVolumeSpecName: "kube-api-access-zqpr6") pod "f5bdb996-544f-412b-8279-09b9e0bc2510" (UID: "f5bdb996-544f-412b-8279-09b9e0bc2510"). InnerVolumeSpecName "kube-api-access-zqpr6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:45:02 crc kubenswrapper[4791]: I1208 21:45:02.734698 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5bdb996-544f-412b-8279-09b9e0bc2510-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f5bdb996-544f-412b-8279-09b9e0bc2510" (UID: "f5bdb996-544f-412b-8279-09b9e0bc2510"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:45:02 crc kubenswrapper[4791]: I1208 21:45:02.750108 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5bdb996-544f-412b-8279-09b9e0bc2510-config-data" (OuterVolumeSpecName: "config-data") pod "f5bdb996-544f-412b-8279-09b9e0bc2510" (UID: "f5bdb996-544f-412b-8279-09b9e0bc2510"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:45:02 crc kubenswrapper[4791]: I1208 21:45:02.806918 4791 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5bdb996-544f-412b-8279-09b9e0bc2510-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:02 crc kubenswrapper[4791]: I1208 21:45:02.806954 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zqpr6\" (UniqueName: \"kubernetes.io/projected/f5bdb996-544f-412b-8279-09b9e0bc2510-kube-api-access-zqpr6\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:02 crc kubenswrapper[4791]: I1208 21:45:02.806965 4791 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5bdb996-544f-412b-8279-09b9e0bc2510-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:02 crc kubenswrapper[4791]: I1208 21:45:02.806975 4791 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f5bdb996-544f-412b-8279-09b9e0bc2510-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:02 crc kubenswrapper[4791]: I1208 21:45:02.968031 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-6tl54" event={"ID":"f5bdb996-544f-412b-8279-09b9e0bc2510","Type":"ContainerDied","Data":"abbe4133c1c8e64aa520897760c98045592c7180122900f45fc6c9e85b23bb4f"} Dec 08 21:45:02 crc kubenswrapper[4791]: I1208 21:45:02.968098 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="abbe4133c1c8e64aa520897760c98045592c7180122900f45fc6c9e85b23bb4f" Dec 08 21:45:02 crc kubenswrapper[4791]: I1208 21:45:02.968096 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-6tl54" Dec 08 21:45:03 crc kubenswrapper[4791]: I1208 21:45:03.195387 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 08 21:45:03 crc kubenswrapper[4791]: I1208 21:45:03.195753 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="e204b013-c633-4311-a89a-0547f8cfc6e3" containerName="nova-api-log" containerID="cri-o://0ea6761a76e32dcc035e62e1153dd1391d0bd691a64b982d01b6f0052ff68e97" gracePeriod=30 Dec 08 21:45:03 crc kubenswrapper[4791]: I1208 21:45:03.195983 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="e204b013-c633-4311-a89a-0547f8cfc6e3" containerName="nova-api-api" containerID="cri-o://a1fe9235dcf4d1679495f5659b82e903935c2aba259f15c6a38c653ec330afa7" gracePeriod=30 Dec 08 21:45:03 crc kubenswrapper[4791]: I1208 21:45:03.223649 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 08 21:45:03 crc kubenswrapper[4791]: I1208 21:45:03.224222 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="c0ef3022-5928-4df9-a234-ceb460749df4" containerName="nova-metadata-log" containerID="cri-o://92323532d474a1ce626af0463cdeaaffc018da266e1013824b2b86cac03642a6" gracePeriod=30 Dec 08 21:45:03 crc kubenswrapper[4791]: I1208 21:45:03.224403 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="c0ef3022-5928-4df9-a234-ceb460749df4" containerName="nova-metadata-metadata" containerID="cri-o://639284ecb43f4e60a86986ed31637b1fcc6c568e207600299345ef95c2c5c734" gracePeriod=30 Dec 08 21:45:03 crc 
kubenswrapper[4791]: I1208 21:45:03.366539 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 08 21:45:03 crc kubenswrapper[4791]: I1208 21:45:03.367105 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="033789b4-c9c3-4364-a6d5-db34740240b4" containerName="nova-scheduler-scheduler" containerID="cri-o://33db1fa78d0dc34574fff4243596bfb876f1367c4ea93215bf5e862897c98132" gracePeriod=30 Dec 08 21:45:03 crc kubenswrapper[4791]: I1208 21:45:03.624644 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420505-ztjvz" Dec 08 21:45:03 crc kubenswrapper[4791]: I1208 21:45:03.635507 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72a6c619-8a0d-4a9f-b68f-f316cf96202d" path="/var/lib/kubelet/pods/72a6c619-8a0d-4a9f-b68f-f316cf96202d/volumes" Dec 08 21:45:03 crc kubenswrapper[4791]: I1208 21:45:03.738110 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cbd46a46-01ac-4b1c-b0b8-8f5cda691535-secret-volume\") pod \"cbd46a46-01ac-4b1c-b0b8-8f5cda691535\" (UID: \"cbd46a46-01ac-4b1c-b0b8-8f5cda691535\") " Dec 08 21:45:03 crc kubenswrapper[4791]: I1208 21:45:03.738405 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cbd46a46-01ac-4b1c-b0b8-8f5cda691535-config-volume\") pod \"cbd46a46-01ac-4b1c-b0b8-8f5cda691535\" (UID: \"cbd46a46-01ac-4b1c-b0b8-8f5cda691535\") " Dec 08 21:45:03 crc kubenswrapper[4791]: I1208 21:45:03.738582 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k46jt\" (UniqueName: \"kubernetes.io/projected/cbd46a46-01ac-4b1c-b0b8-8f5cda691535-kube-api-access-k46jt\") pod \"cbd46a46-01ac-4b1c-b0b8-8f5cda691535\" (UID: \"cbd46a46-01ac-4b1c-b0b8-8f5cda691535\") " Dec 08 21:45:03 crc kubenswrapper[4791]: I1208 21:45:03.745112 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cbd46a46-01ac-4b1c-b0b8-8f5cda691535-kube-api-access-k46jt" (OuterVolumeSpecName: "kube-api-access-k46jt") pod "cbd46a46-01ac-4b1c-b0b8-8f5cda691535" (UID: "cbd46a46-01ac-4b1c-b0b8-8f5cda691535"). InnerVolumeSpecName "kube-api-access-k46jt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:45:03 crc kubenswrapper[4791]: I1208 21:45:03.748581 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cbd46a46-01ac-4b1c-b0b8-8f5cda691535-config-volume" (OuterVolumeSpecName: "config-volume") pod "cbd46a46-01ac-4b1c-b0b8-8f5cda691535" (UID: "cbd46a46-01ac-4b1c-b0b8-8f5cda691535"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:45:03 crc kubenswrapper[4791]: I1208 21:45:03.754278 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cbd46a46-01ac-4b1c-b0b8-8f5cda691535-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "cbd46a46-01ac-4b1c-b0b8-8f5cda691535" (UID: "cbd46a46-01ac-4b1c-b0b8-8f5cda691535"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:45:03 crc kubenswrapper[4791]: I1208 21:45:03.843335 4791 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cbd46a46-01ac-4b1c-b0b8-8f5cda691535-config-volume\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:03 crc kubenswrapper[4791]: I1208 21:45:03.843390 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k46jt\" (UniqueName: \"kubernetes.io/projected/cbd46a46-01ac-4b1c-b0b8-8f5cda691535-kube-api-access-k46jt\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:03 crc kubenswrapper[4791]: I1208 21:45:03.843408 4791 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cbd46a46-01ac-4b1c-b0b8-8f5cda691535-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:03 crc kubenswrapper[4791]: I1208 21:45:03.986791 4791 generic.go:334] "Generic (PLEG): container finished" podID="e204b013-c633-4311-a89a-0547f8cfc6e3" containerID="0ea6761a76e32dcc035e62e1153dd1391d0bd691a64b982d01b6f0052ff68e97" exitCode=143 Dec 08 21:45:03 crc kubenswrapper[4791]: I1208 21:45:03.987129 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e204b013-c633-4311-a89a-0547f8cfc6e3","Type":"ContainerDied","Data":"0ea6761a76e32dcc035e62e1153dd1391d0bd691a64b982d01b6f0052ff68e97"} Dec 08 21:45:03 crc kubenswrapper[4791]: I1208 21:45:03.990452 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29420505-ztjvz" event={"ID":"cbd46a46-01ac-4b1c-b0b8-8f5cda691535","Type":"ContainerDied","Data":"1a0f34f0fc1dbba5ac98d529627f540a6c7112c4b5da20e5fb3c546bbcf56a9f"} Dec 08 21:45:03 crc kubenswrapper[4791]: I1208 21:45:03.990480 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1a0f34f0fc1dbba5ac98d529627f540a6c7112c4b5da20e5fb3c546bbcf56a9f" Dec 08 21:45:03 crc kubenswrapper[4791]: I1208 21:45:03.990551 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420505-ztjvz" Dec 08 21:45:03 crc kubenswrapper[4791]: I1208 21:45:03.995068 4791 generic.go:334] "Generic (PLEG): container finished" podID="c0ef3022-5928-4df9-a234-ceb460749df4" containerID="639284ecb43f4e60a86986ed31637b1fcc6c568e207600299345ef95c2c5c734" exitCode=0 Dec 08 21:45:03 crc kubenswrapper[4791]: I1208 21:45:03.995102 4791 generic.go:334] "Generic (PLEG): container finished" podID="c0ef3022-5928-4df9-a234-ceb460749df4" containerID="92323532d474a1ce626af0463cdeaaffc018da266e1013824b2b86cac03642a6" exitCode=143 Dec 08 21:45:03 crc kubenswrapper[4791]: I1208 21:45:03.995122 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c0ef3022-5928-4df9-a234-ceb460749df4","Type":"ContainerDied","Data":"639284ecb43f4e60a86986ed31637b1fcc6c568e207600299345ef95c2c5c734"} Dec 08 21:45:03 crc kubenswrapper[4791]: I1208 21:45:03.995145 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c0ef3022-5928-4df9-a234-ceb460749df4","Type":"ContainerDied","Data":"92323532d474a1ce626af0463cdeaaffc018da266e1013824b2b86cac03642a6"} Dec 08 21:45:04 crc kubenswrapper[4791]: I1208 21:45:04.178509 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 08 21:45:04 crc kubenswrapper[4791]: I1208 21:45:04.251987 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nrqp6\" (UniqueName: \"kubernetes.io/projected/c0ef3022-5928-4df9-a234-ceb460749df4-kube-api-access-nrqp6\") pod \"c0ef3022-5928-4df9-a234-ceb460749df4\" (UID: \"c0ef3022-5928-4df9-a234-ceb460749df4\") " Dec 08 21:45:04 crc kubenswrapper[4791]: I1208 21:45:04.252143 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0ef3022-5928-4df9-a234-ceb460749df4-logs\") pod \"c0ef3022-5928-4df9-a234-ceb460749df4\" (UID: \"c0ef3022-5928-4df9-a234-ceb460749df4\") " Dec 08 21:45:04 crc kubenswrapper[4791]: I1208 21:45:04.252269 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0ef3022-5928-4df9-a234-ceb460749df4-combined-ca-bundle\") pod \"c0ef3022-5928-4df9-a234-ceb460749df4\" (UID: \"c0ef3022-5928-4df9-a234-ceb460749df4\") " Dec 08 21:45:04 crc kubenswrapper[4791]: I1208 21:45:04.252445 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0ef3022-5928-4df9-a234-ceb460749df4-config-data\") pod \"c0ef3022-5928-4df9-a234-ceb460749df4\" (UID: \"c0ef3022-5928-4df9-a234-ceb460749df4\") " Dec 08 21:45:04 crc kubenswrapper[4791]: I1208 21:45:04.252478 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0ef3022-5928-4df9-a234-ceb460749df4-nova-metadata-tls-certs\") pod \"c0ef3022-5928-4df9-a234-ceb460749df4\" (UID: \"c0ef3022-5928-4df9-a234-ceb460749df4\") " Dec 08 21:45:04 crc kubenswrapper[4791]: I1208 21:45:04.255761 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0ef3022-5928-4df9-a234-ceb460749df4-logs" (OuterVolumeSpecName: "logs") pod "c0ef3022-5928-4df9-a234-ceb460749df4" (UID: "c0ef3022-5928-4df9-a234-ceb460749df4"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:45:04 crc kubenswrapper[4791]: I1208 21:45:04.259842 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0ef3022-5928-4df9-a234-ceb460749df4-kube-api-access-nrqp6" (OuterVolumeSpecName: "kube-api-access-nrqp6") pod "c0ef3022-5928-4df9-a234-ceb460749df4" (UID: "c0ef3022-5928-4df9-a234-ceb460749df4"). InnerVolumeSpecName "kube-api-access-nrqp6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:45:04 crc kubenswrapper[4791]: I1208 21:45:04.307213 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0ef3022-5928-4df9-a234-ceb460749df4-config-data" (OuterVolumeSpecName: "config-data") pod "c0ef3022-5928-4df9-a234-ceb460749df4" (UID: "c0ef3022-5928-4df9-a234-ceb460749df4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:45:04 crc kubenswrapper[4791]: I1208 21:45:04.312886 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0ef3022-5928-4df9-a234-ceb460749df4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c0ef3022-5928-4df9-a234-ceb460749df4" (UID: "c0ef3022-5928-4df9-a234-ceb460749df4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:45:04 crc kubenswrapper[4791]: I1208 21:45:04.332234 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0ef3022-5928-4df9-a234-ceb460749df4-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "c0ef3022-5928-4df9-a234-ceb460749df4" (UID: "c0ef3022-5928-4df9-a234-ceb460749df4"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:45:04 crc kubenswrapper[4791]: I1208 21:45:04.355226 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nrqp6\" (UniqueName: \"kubernetes.io/projected/c0ef3022-5928-4df9-a234-ceb460749df4-kube-api-access-nrqp6\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:04 crc kubenswrapper[4791]: I1208 21:45:04.355284 4791 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0ef3022-5928-4df9-a234-ceb460749df4-logs\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:04 crc kubenswrapper[4791]: I1208 21:45:04.355303 4791 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0ef3022-5928-4df9-a234-ceb460749df4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:04 crc kubenswrapper[4791]: I1208 21:45:04.355315 4791 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0ef3022-5928-4df9-a234-ceb460749df4-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:04 crc kubenswrapper[4791]: I1208 21:45:04.355329 4791 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0ef3022-5928-4df9-a234-ceb460749df4-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:04 crc kubenswrapper[4791]: I1208 21:45:04.971508 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.025042 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c0ef3022-5928-4df9-a234-ceb460749df4","Type":"ContainerDied","Data":"b0367986cb9fda088623378198cdcd6590caedcfa24bfacf53c10dde633419c3"} Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.025070 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.025137 4791 scope.go:117] "RemoveContainer" containerID="639284ecb43f4e60a86986ed31637b1fcc6c568e207600299345ef95c2c5c734" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.028067 4791 generic.go:334] "Generic (PLEG): container finished" podID="033789b4-c9c3-4364-a6d5-db34740240b4" containerID="33db1fa78d0dc34574fff4243596bfb876f1367c4ea93215bf5e862897c98132" exitCode=0 Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.028123 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"033789b4-c9c3-4364-a6d5-db34740240b4","Type":"ContainerDied","Data":"33db1fa78d0dc34574fff4243596bfb876f1367c4ea93215bf5e862897c98132"} Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.028153 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"033789b4-c9c3-4364-a6d5-db34740240b4","Type":"ContainerDied","Data":"2b38181e6bae1f16a2c1899652ba6d085fb7a1664c8725dbf3ecebbdcd7b49d1"} Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.028216 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.064506 4791 scope.go:117] "RemoveContainer" containerID="92323532d474a1ce626af0463cdeaaffc018da266e1013824b2b86cac03642a6" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.078471 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.091611 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/033789b4-c9c3-4364-a6d5-db34740240b4-combined-ca-bundle\") pod \"033789b4-c9c3-4364-a6d5-db34740240b4\" (UID: \"033789b4-c9c3-4364-a6d5-db34740240b4\") " Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.091682 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/033789b4-c9c3-4364-a6d5-db34740240b4-config-data\") pod \"033789b4-c9c3-4364-a6d5-db34740240b4\" (UID: \"033789b4-c9c3-4364-a6d5-db34740240b4\") " Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.091832 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xwm4b\" (UniqueName: \"kubernetes.io/projected/033789b4-c9c3-4364-a6d5-db34740240b4-kube-api-access-xwm4b\") pod \"033789b4-c9c3-4364-a6d5-db34740240b4\" (UID: \"033789b4-c9c3-4364-a6d5-db34740240b4\") " Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.096946 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.100368 4791 scope.go:117] "RemoveContainer" containerID="33db1fa78d0dc34574fff4243596bfb876f1367c4ea93215bf5e862897c98132" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.115092 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/033789b4-c9c3-4364-a6d5-db34740240b4-kube-api-access-xwm4b" (OuterVolumeSpecName: "kube-api-access-xwm4b") pod "033789b4-c9c3-4364-a6d5-db34740240b4" (UID: "033789b4-c9c3-4364-a6d5-db34740240b4"). InnerVolumeSpecName "kube-api-access-xwm4b". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.136538 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/033789b4-c9c3-4364-a6d5-db34740240b4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "033789b4-c9c3-4364-a6d5-db34740240b4" (UID: "033789b4-c9c3-4364-a6d5-db34740240b4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.138002 4791 scope.go:117] "RemoveContainer" containerID="33db1fa78d0dc34574fff4243596bfb876f1367c4ea93215bf5e862897c98132" Dec 08 21:45:05 crc kubenswrapper[4791]: E1208 21:45:05.138568 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"33db1fa78d0dc34574fff4243596bfb876f1367c4ea93215bf5e862897c98132\": container with ID starting with 33db1fa78d0dc34574fff4243596bfb876f1367c4ea93215bf5e862897c98132 not found: ID does not exist" containerID="33db1fa78d0dc34574fff4243596bfb876f1367c4ea93215bf5e862897c98132" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.138607 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33db1fa78d0dc34574fff4243596bfb876f1367c4ea93215bf5e862897c98132"} err="failed to get container status \"33db1fa78d0dc34574fff4243596bfb876f1367c4ea93215bf5e862897c98132\": rpc error: code = NotFound desc = could not find container \"33db1fa78d0dc34574fff4243596bfb876f1367c4ea93215bf5e862897c98132\": container with ID starting with 33db1fa78d0dc34574fff4243596bfb876f1367c4ea93215bf5e862897c98132 not found: ID does not exist" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.140187 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/033789b4-c9c3-4364-a6d5-db34740240b4-config-data" (OuterVolumeSpecName: "config-data") pod "033789b4-c9c3-4364-a6d5-db34740240b4" (UID: "033789b4-c9c3-4364-a6d5-db34740240b4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.152097 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 08 21:45:05 crc kubenswrapper[4791]: E1208 21:45:05.152735 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5bdb996-544f-412b-8279-09b9e0bc2510" containerName="nova-manage" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.152756 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5bdb996-544f-412b-8279-09b9e0bc2510" containerName="nova-manage" Dec 08 21:45:05 crc kubenswrapper[4791]: E1208 21:45:05.152779 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72a6c619-8a0d-4a9f-b68f-f316cf96202d" containerName="dnsmasq-dns" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.152786 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="72a6c619-8a0d-4a9f-b68f-f316cf96202d" containerName="dnsmasq-dns" Dec 08 21:45:05 crc kubenswrapper[4791]: E1208 21:45:05.152796 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbd46a46-01ac-4b1c-b0b8-8f5cda691535" containerName="collect-profiles" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.152803 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbd46a46-01ac-4b1c-b0b8-8f5cda691535" containerName="collect-profiles" Dec 08 21:45:05 crc kubenswrapper[4791]: E1208 21:45:05.152813 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="033789b4-c9c3-4364-a6d5-db34740240b4" containerName="nova-scheduler-scheduler" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.152820 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="033789b4-c9c3-4364-a6d5-db34740240b4" containerName="nova-scheduler-scheduler" Dec 08 21:45:05 crc kubenswrapper[4791]: E1208 21:45:05.152839 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72a6c619-8a0d-4a9f-b68f-f316cf96202d" containerName="init" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.152846 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="72a6c619-8a0d-4a9f-b68f-f316cf96202d" containerName="init" Dec 08 21:45:05 crc kubenswrapper[4791]: E1208 21:45:05.152859 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0ef3022-5928-4df9-a234-ceb460749df4" containerName="nova-metadata-log" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.152865 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0ef3022-5928-4df9-a234-ceb460749df4" containerName="nova-metadata-log" Dec 08 21:45:05 crc kubenswrapper[4791]: E1208 21:45:05.152874 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0ef3022-5928-4df9-a234-ceb460749df4" containerName="nova-metadata-metadata" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.152885 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0ef3022-5928-4df9-a234-ceb460749df4" containerName="nova-metadata-metadata" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.153119 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="72a6c619-8a0d-4a9f-b68f-f316cf96202d" containerName="dnsmasq-dns" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.153138 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0ef3022-5928-4df9-a234-ceb460749df4" containerName="nova-metadata-metadata" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.153155 4791 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="c0ef3022-5928-4df9-a234-ceb460749df4" containerName="nova-metadata-log" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.153165 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="cbd46a46-01ac-4b1c-b0b8-8f5cda691535" containerName="collect-profiles" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.153171 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="033789b4-c9c3-4364-a6d5-db34740240b4" containerName="nova-scheduler-scheduler" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.153196 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5bdb996-544f-412b-8279-09b9e0bc2510" containerName="nova-manage" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.154702 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.158192 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.158742 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.163556 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.194754 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xwm4b\" (UniqueName: \"kubernetes.io/projected/033789b4-c9c3-4364-a6d5-db34740240b4-kube-api-access-xwm4b\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.194790 4791 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/033789b4-c9c3-4364-a6d5-db34740240b4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.194800 4791 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/033789b4-c9c3-4364-a6d5-db34740240b4-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.251313 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.251371 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.296701 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4aecb1a7-b3e4-40b9-baa9-3daa133d63f3-config-data\") pod \"nova-metadata-0\" (UID: \"4aecb1a7-b3e4-40b9-baa9-3daa133d63f3\") " pod="openstack/nova-metadata-0" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.296781 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9v6hn\" (UniqueName: 
\"kubernetes.io/projected/4aecb1a7-b3e4-40b9-baa9-3daa133d63f3-kube-api-access-9v6hn\") pod \"nova-metadata-0\" (UID: \"4aecb1a7-b3e4-40b9-baa9-3daa133d63f3\") " pod="openstack/nova-metadata-0" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.296811 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4aecb1a7-b3e4-40b9-baa9-3daa133d63f3-logs\") pod \"nova-metadata-0\" (UID: \"4aecb1a7-b3e4-40b9-baa9-3daa133d63f3\") " pod="openstack/nova-metadata-0" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.297373 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4aecb1a7-b3e4-40b9-baa9-3daa133d63f3-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4aecb1a7-b3e4-40b9-baa9-3daa133d63f3\") " pod="openstack/nova-metadata-0" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.297478 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4aecb1a7-b3e4-40b9-baa9-3daa133d63f3-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"4aecb1a7-b3e4-40b9-baa9-3daa133d63f3\") " pod="openstack/nova-metadata-0" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.395814 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.400256 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4aecb1a7-b3e4-40b9-baa9-3daa133d63f3-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4aecb1a7-b3e4-40b9-baa9-3daa133d63f3\") " pod="openstack/nova-metadata-0" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.400321 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4aecb1a7-b3e4-40b9-baa9-3daa133d63f3-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"4aecb1a7-b3e4-40b9-baa9-3daa133d63f3\") " pod="openstack/nova-metadata-0" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.400421 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4aecb1a7-b3e4-40b9-baa9-3daa133d63f3-config-data\") pod \"nova-metadata-0\" (UID: \"4aecb1a7-b3e4-40b9-baa9-3daa133d63f3\") " pod="openstack/nova-metadata-0" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.400446 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9v6hn\" (UniqueName: \"kubernetes.io/projected/4aecb1a7-b3e4-40b9-baa9-3daa133d63f3-kube-api-access-9v6hn\") pod \"nova-metadata-0\" (UID: \"4aecb1a7-b3e4-40b9-baa9-3daa133d63f3\") " pod="openstack/nova-metadata-0" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.400466 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4aecb1a7-b3e4-40b9-baa9-3daa133d63f3-logs\") pod \"nova-metadata-0\" (UID: \"4aecb1a7-b3e4-40b9-baa9-3daa133d63f3\") " pod="openstack/nova-metadata-0" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.401341 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4aecb1a7-b3e4-40b9-baa9-3daa133d63f3-logs\") pod 
\"nova-metadata-0\" (UID: \"4aecb1a7-b3e4-40b9-baa9-3daa133d63f3\") " pod="openstack/nova-metadata-0" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.408849 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4aecb1a7-b3e4-40b9-baa9-3daa133d63f3-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4aecb1a7-b3e4-40b9-baa9-3daa133d63f3\") " pod="openstack/nova-metadata-0" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.408946 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.413476 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4aecb1a7-b3e4-40b9-baa9-3daa133d63f3-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"4aecb1a7-b3e4-40b9-baa9-3daa133d63f3\") " pod="openstack/nova-metadata-0" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.413931 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4aecb1a7-b3e4-40b9-baa9-3daa133d63f3-config-data\") pod \"nova-metadata-0\" (UID: \"4aecb1a7-b3e4-40b9-baa9-3daa133d63f3\") " pod="openstack/nova-metadata-0" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.419310 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.421080 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9v6hn\" (UniqueName: \"kubernetes.io/projected/4aecb1a7-b3e4-40b9-baa9-3daa133d63f3-kube-api-access-9v6hn\") pod \"nova-metadata-0\" (UID: \"4aecb1a7-b3e4-40b9-baa9-3daa133d63f3\") " pod="openstack/nova-metadata-0" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.421154 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.423750 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.440428 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.478852 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.502633 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93\") " pod="openstack/nova-scheduler-0" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.503088 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qkbtf\" (UniqueName: \"kubernetes.io/projected/2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93-kube-api-access-qkbtf\") pod \"nova-scheduler-0\" (UID: \"2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93\") " pod="openstack/nova-scheduler-0" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.503314 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93-config-data\") pod \"nova-scheduler-0\" (UID: \"2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93\") " pod="openstack/nova-scheduler-0" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.605918 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93-config-data\") pod \"nova-scheduler-0\" (UID: \"2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93\") " pod="openstack/nova-scheduler-0" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.606067 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93\") " pod="openstack/nova-scheduler-0" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.606094 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qkbtf\" (UniqueName: \"kubernetes.io/projected/2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93-kube-api-access-qkbtf\") pod \"nova-scheduler-0\" (UID: \"2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93\") " pod="openstack/nova-scheduler-0" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.618658 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93-config-data\") pod \"nova-scheduler-0\" (UID: \"2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93\") " pod="openstack/nova-scheduler-0" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.621663 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qkbtf\" (UniqueName: \"kubernetes.io/projected/2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93-kube-api-access-qkbtf\") pod \"nova-scheduler-0\" (UID: \"2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93\") " pod="openstack/nova-scheduler-0" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.621817 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93\") " pod="openstack/nova-scheduler-0" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.623187 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="033789b4-c9c3-4364-a6d5-db34740240b4" path="/var/lib/kubelet/pods/033789b4-c9c3-4364-a6d5-db34740240b4/volumes" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.624243 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0ef3022-5928-4df9-a234-ceb460749df4" path="/var/lib/kubelet/pods/c0ef3022-5928-4df9-a234-ceb460749df4/volumes" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.633723 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 08 21:45:05 crc kubenswrapper[4791]: I1208 21:45:05.987531 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 08 21:45:06 crc kubenswrapper[4791]: I1208 21:45:06.085513 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4aecb1a7-b3e4-40b9-baa9-3daa133d63f3","Type":"ContainerStarted","Data":"d34fd8293cb97ed8854d871a84163f1fb9f308509523ac44cf5d3d965fe08f3f"} Dec 08 21:45:06 crc kubenswrapper[4791]: I1208 21:45:06.159531 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 08 21:45:07 crc kubenswrapper[4791]: I1208 21:45:07.100199 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93","Type":"ContainerStarted","Data":"e71c442fd99e3bb56ebf7dd62f24739682e37beec823fed946303692aac2a294"} Dec 08 21:45:07 crc kubenswrapper[4791]: I1208 21:45:07.100702 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93","Type":"ContainerStarted","Data":"3b7773fa49087b2e782afc5bbb5a6428c2e852436cafd9f325fe817e74c95917"} Dec 08 21:45:07 crc kubenswrapper[4791]: I1208 21:45:07.102589 4791 generic.go:334] "Generic (PLEG): container finished" podID="4d1939f0-2dba-46a3-96ab-bb0fd01e0c40" containerID="34b6a0637268b786e00dbaf8e3334ebb53ccf8ed29eca966d9e836eafc366ba3" exitCode=0 Dec 08 21:45:07 crc kubenswrapper[4791]: I1208 21:45:07.102645 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-wkrbl" event={"ID":"4d1939f0-2dba-46a3-96ab-bb0fd01e0c40","Type":"ContainerDied","Data":"34b6a0637268b786e00dbaf8e3334ebb53ccf8ed29eca966d9e836eafc366ba3"} Dec 08 21:45:07 crc kubenswrapper[4791]: I1208 21:45:07.105327 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4aecb1a7-b3e4-40b9-baa9-3daa133d63f3","Type":"ContainerStarted","Data":"be8501a1cb4c3716fc13390963b59d9253b40d62a962860a2ddf348781872732"} Dec 08 21:45:07 crc kubenswrapper[4791]: I1208 21:45:07.105353 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4aecb1a7-b3e4-40b9-baa9-3daa133d63f3","Type":"ContainerStarted","Data":"cc7c452e6d527ca1eab3093793a72d2f58010224b1c02b690fa2ab1c6c95a5b7"} Dec 08 21:45:07 crc kubenswrapper[4791]: I1208 21:45:07.126397 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.126375736 podStartE2EDuration="2.126375736s" podCreationTimestamp="2025-12-08 21:45:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:45:07.118690497 +0000 UTC m=+1583.817448842" watchObservedRunningTime="2025-12-08 21:45:07.126375736 +0000 UTC m=+1583.825134081" Dec 08 21:45:07 crc kubenswrapper[4791]: I1208 21:45:07.165380 
4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.165356854 podStartE2EDuration="2.165356854s" podCreationTimestamp="2025-12-08 21:45:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:45:07.151788981 +0000 UTC m=+1583.850547326" watchObservedRunningTime="2025-12-08 21:45:07.165356854 +0000 UTC m=+1583.864115199" Dec 08 21:45:08 crc kubenswrapper[4791]: I1208 21:45:08.117789 4791 generic.go:334] "Generic (PLEG): container finished" podID="e204b013-c633-4311-a89a-0547f8cfc6e3" containerID="a1fe9235dcf4d1679495f5659b82e903935c2aba259f15c6a38c653ec330afa7" exitCode=0 Dec 08 21:45:08 crc kubenswrapper[4791]: I1208 21:45:08.117839 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e204b013-c633-4311-a89a-0547f8cfc6e3","Type":"ContainerDied","Data":"a1fe9235dcf4d1679495f5659b82e903935c2aba259f15c6a38c653ec330afa7"} Dec 08 21:45:08 crc kubenswrapper[4791]: I1208 21:45:08.118256 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e204b013-c633-4311-a89a-0547f8cfc6e3","Type":"ContainerDied","Data":"efa1a5f5a57679c8eef39fea9807190ec855fdb7dfd53f1c6de0d2cb51261c60"} Dec 08 21:45:08 crc kubenswrapper[4791]: I1208 21:45:08.118275 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="efa1a5f5a57679c8eef39fea9807190ec855fdb7dfd53f1c6de0d2cb51261c60" Dec 08 21:45:08 crc kubenswrapper[4791]: I1208 21:45:08.237174 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 08 21:45:08 crc kubenswrapper[4791]: I1208 21:45:08.275337 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e204b013-c633-4311-a89a-0547f8cfc6e3-combined-ca-bundle\") pod \"e204b013-c633-4311-a89a-0547f8cfc6e3\" (UID: \"e204b013-c633-4311-a89a-0547f8cfc6e3\") " Dec 08 21:45:08 crc kubenswrapper[4791]: I1208 21:45:08.275672 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qxkdh\" (UniqueName: \"kubernetes.io/projected/e204b013-c633-4311-a89a-0547f8cfc6e3-kube-api-access-qxkdh\") pod \"e204b013-c633-4311-a89a-0547f8cfc6e3\" (UID: \"e204b013-c633-4311-a89a-0547f8cfc6e3\") " Dec 08 21:45:08 crc kubenswrapper[4791]: I1208 21:45:08.275737 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e204b013-c633-4311-a89a-0547f8cfc6e3-logs\") pod \"e204b013-c633-4311-a89a-0547f8cfc6e3\" (UID: \"e204b013-c633-4311-a89a-0547f8cfc6e3\") " Dec 08 21:45:08 crc kubenswrapper[4791]: I1208 21:45:08.276509 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e204b013-c633-4311-a89a-0547f8cfc6e3-logs" (OuterVolumeSpecName: "logs") pod "e204b013-c633-4311-a89a-0547f8cfc6e3" (UID: "e204b013-c633-4311-a89a-0547f8cfc6e3"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:45:08 crc kubenswrapper[4791]: I1208 21:45:08.276802 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e204b013-c633-4311-a89a-0547f8cfc6e3-config-data\") pod \"e204b013-c633-4311-a89a-0547f8cfc6e3\" (UID: \"e204b013-c633-4311-a89a-0547f8cfc6e3\") " Dec 08 21:45:08 crc kubenswrapper[4791]: I1208 21:45:08.278128 4791 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e204b013-c633-4311-a89a-0547f8cfc6e3-logs\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:08 crc kubenswrapper[4791]: I1208 21:45:08.285071 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e204b013-c633-4311-a89a-0547f8cfc6e3-kube-api-access-qxkdh" (OuterVolumeSpecName: "kube-api-access-qxkdh") pod "e204b013-c633-4311-a89a-0547f8cfc6e3" (UID: "e204b013-c633-4311-a89a-0547f8cfc6e3"). InnerVolumeSpecName "kube-api-access-qxkdh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:45:08 crc kubenswrapper[4791]: I1208 21:45:08.323920 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e204b013-c633-4311-a89a-0547f8cfc6e3-config-data" (OuterVolumeSpecName: "config-data") pod "e204b013-c633-4311-a89a-0547f8cfc6e3" (UID: "e204b013-c633-4311-a89a-0547f8cfc6e3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:45:08 crc kubenswrapper[4791]: I1208 21:45:08.323963 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e204b013-c633-4311-a89a-0547f8cfc6e3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e204b013-c633-4311-a89a-0547f8cfc6e3" (UID: "e204b013-c633-4311-a89a-0547f8cfc6e3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:45:08 crc kubenswrapper[4791]: I1208 21:45:08.381096 4791 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e204b013-c633-4311-a89a-0547f8cfc6e3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:08 crc kubenswrapper[4791]: I1208 21:45:08.381144 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qxkdh\" (UniqueName: \"kubernetes.io/projected/e204b013-c633-4311-a89a-0547f8cfc6e3-kube-api-access-qxkdh\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:08 crc kubenswrapper[4791]: I1208 21:45:08.381162 4791 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e204b013-c633-4311-a89a-0547f8cfc6e3-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:08 crc kubenswrapper[4791]: I1208 21:45:08.406603 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-wkrbl" Dec 08 21:45:08 crc kubenswrapper[4791]: I1208 21:45:08.483060 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d1939f0-2dba-46a3-96ab-bb0fd01e0c40-config-data\") pod \"4d1939f0-2dba-46a3-96ab-bb0fd01e0c40\" (UID: \"4d1939f0-2dba-46a3-96ab-bb0fd01e0c40\") " Dec 08 21:45:08 crc kubenswrapper[4791]: I1208 21:45:08.483206 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r5584\" (UniqueName: \"kubernetes.io/projected/4d1939f0-2dba-46a3-96ab-bb0fd01e0c40-kube-api-access-r5584\") pod \"4d1939f0-2dba-46a3-96ab-bb0fd01e0c40\" (UID: \"4d1939f0-2dba-46a3-96ab-bb0fd01e0c40\") " Dec 08 21:45:08 crc kubenswrapper[4791]: I1208 21:45:08.483357 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d1939f0-2dba-46a3-96ab-bb0fd01e0c40-combined-ca-bundle\") pod \"4d1939f0-2dba-46a3-96ab-bb0fd01e0c40\" (UID: \"4d1939f0-2dba-46a3-96ab-bb0fd01e0c40\") " Dec 08 21:45:08 crc kubenswrapper[4791]: I1208 21:45:08.483443 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4d1939f0-2dba-46a3-96ab-bb0fd01e0c40-scripts\") pod \"4d1939f0-2dba-46a3-96ab-bb0fd01e0c40\" (UID: \"4d1939f0-2dba-46a3-96ab-bb0fd01e0c40\") " Dec 08 21:45:08 crc kubenswrapper[4791]: I1208 21:45:08.486846 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4d1939f0-2dba-46a3-96ab-bb0fd01e0c40-scripts" (OuterVolumeSpecName: "scripts") pod "4d1939f0-2dba-46a3-96ab-bb0fd01e0c40" (UID: "4d1939f0-2dba-46a3-96ab-bb0fd01e0c40"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:45:08 crc kubenswrapper[4791]: I1208 21:45:08.486925 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d1939f0-2dba-46a3-96ab-bb0fd01e0c40-kube-api-access-r5584" (OuterVolumeSpecName: "kube-api-access-r5584") pod "4d1939f0-2dba-46a3-96ab-bb0fd01e0c40" (UID: "4d1939f0-2dba-46a3-96ab-bb0fd01e0c40"). InnerVolumeSpecName "kube-api-access-r5584". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:45:08 crc kubenswrapper[4791]: I1208 21:45:08.512554 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4d1939f0-2dba-46a3-96ab-bb0fd01e0c40-config-data" (OuterVolumeSpecName: "config-data") pod "4d1939f0-2dba-46a3-96ab-bb0fd01e0c40" (UID: "4d1939f0-2dba-46a3-96ab-bb0fd01e0c40"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:45:08 crc kubenswrapper[4791]: I1208 21:45:08.522977 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4d1939f0-2dba-46a3-96ab-bb0fd01e0c40-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4d1939f0-2dba-46a3-96ab-bb0fd01e0c40" (UID: "4d1939f0-2dba-46a3-96ab-bb0fd01e0c40"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:45:08 crc kubenswrapper[4791]: I1208 21:45:08.586540 4791 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d1939f0-2dba-46a3-96ab-bb0fd01e0c40-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:08 crc kubenswrapper[4791]: I1208 21:45:08.586598 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r5584\" (UniqueName: \"kubernetes.io/projected/4d1939f0-2dba-46a3-96ab-bb0fd01e0c40-kube-api-access-r5584\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:08 crc kubenswrapper[4791]: I1208 21:45:08.586613 4791 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d1939f0-2dba-46a3-96ab-bb0fd01e0c40-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:08 crc kubenswrapper[4791]: I1208 21:45:08.586627 4791 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4d1939f0-2dba-46a3-96ab-bb0fd01e0c40-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:09 crc kubenswrapper[4791]: I1208 21:45:09.132553 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-wkrbl" Dec 08 21:45:09 crc kubenswrapper[4791]: I1208 21:45:09.132564 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 08 21:45:09 crc kubenswrapper[4791]: I1208 21:45:09.132599 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-wkrbl" event={"ID":"4d1939f0-2dba-46a3-96ab-bb0fd01e0c40","Type":"ContainerDied","Data":"21e8766d53a1c43bf50880220fb5dfc78dca4ce893603f82eddfb9372399e41a"} Dec 08 21:45:09 crc kubenswrapper[4791]: I1208 21:45:09.133100 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="21e8766d53a1c43bf50880220fb5dfc78dca4ce893603f82eddfb9372399e41a" Dec 08 21:45:09 crc kubenswrapper[4791]: I1208 21:45:09.192242 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 08 21:45:09 crc kubenswrapper[4791]: I1208 21:45:09.205526 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 08 21:45:09 crc kubenswrapper[4791]: I1208 21:45:09.221963 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 08 21:45:09 crc kubenswrapper[4791]: E1208 21:45:09.222573 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d1939f0-2dba-46a3-96ab-bb0fd01e0c40" containerName="nova-cell1-conductor-db-sync" Dec 08 21:45:09 crc kubenswrapper[4791]: I1208 21:45:09.222594 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d1939f0-2dba-46a3-96ab-bb0fd01e0c40" containerName="nova-cell1-conductor-db-sync" Dec 08 21:45:09 crc kubenswrapper[4791]: E1208 21:45:09.222629 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e204b013-c633-4311-a89a-0547f8cfc6e3" containerName="nova-api-api" Dec 08 21:45:09 crc kubenswrapper[4791]: I1208 21:45:09.222639 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="e204b013-c633-4311-a89a-0547f8cfc6e3" containerName="nova-api-api" Dec 08 21:45:09 crc kubenswrapper[4791]: E1208 21:45:09.223668 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e204b013-c633-4311-a89a-0547f8cfc6e3" containerName="nova-api-log" Dec 08 21:45:09 crc kubenswrapper[4791]: I1208 21:45:09.223731 4791 
state_mem.go:107] "Deleted CPUSet assignment" podUID="e204b013-c633-4311-a89a-0547f8cfc6e3" containerName="nova-api-log" Dec 08 21:45:09 crc kubenswrapper[4791]: I1208 21:45:09.224064 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d1939f0-2dba-46a3-96ab-bb0fd01e0c40" containerName="nova-cell1-conductor-db-sync" Dec 08 21:45:09 crc kubenswrapper[4791]: I1208 21:45:09.224103 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="e204b013-c633-4311-a89a-0547f8cfc6e3" containerName="nova-api-log" Dec 08 21:45:09 crc kubenswrapper[4791]: I1208 21:45:09.224121 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="e204b013-c633-4311-a89a-0547f8cfc6e3" containerName="nova-api-api" Dec 08 21:45:09 crc kubenswrapper[4791]: I1208 21:45:09.225799 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 08 21:45:09 crc kubenswrapper[4791]: I1208 21:45:09.234375 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 08 21:45:09 crc kubenswrapper[4791]: I1208 21:45:09.241420 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 08 21:45:09 crc kubenswrapper[4791]: I1208 21:45:09.267992 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 08 21:45:09 crc kubenswrapper[4791]: I1208 21:45:09.270771 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 08 21:45:09 crc kubenswrapper[4791]: I1208 21:45:09.274150 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 08 21:45:09 crc kubenswrapper[4791]: I1208 21:45:09.307484 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 08 21:45:09 crc kubenswrapper[4791]: I1208 21:45:09.312133 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11583b6c-2a5f-4a4d-b4ec-b59d3dc32f5d-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"11583b6c-2a5f-4a4d-b4ec-b59d3dc32f5d\") " pod="openstack/nova-cell1-conductor-0" Dec 08 21:45:09 crc kubenswrapper[4791]: I1208 21:45:09.312199 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d11f729-775d-4a25-8eb4-27757ae78469-config-data\") pod \"nova-api-0\" (UID: \"3d11f729-775d-4a25-8eb4-27757ae78469\") " pod="openstack/nova-api-0" Dec 08 21:45:09 crc kubenswrapper[4791]: I1208 21:45:09.312274 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11583b6c-2a5f-4a4d-b4ec-b59d3dc32f5d-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"11583b6c-2a5f-4a4d-b4ec-b59d3dc32f5d\") " pod="openstack/nova-cell1-conductor-0" Dec 08 21:45:09 crc kubenswrapper[4791]: I1208 21:45:09.312340 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b6rrj\" (UniqueName: \"kubernetes.io/projected/3d11f729-775d-4a25-8eb4-27757ae78469-kube-api-access-b6rrj\") pod \"nova-api-0\" (UID: \"3d11f729-775d-4a25-8eb4-27757ae78469\") " pod="openstack/nova-api-0" Dec 08 21:45:09 crc kubenswrapper[4791]: I1208 21:45:09.312367 4791 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d11f729-775d-4a25-8eb4-27757ae78469-logs\") pod \"nova-api-0\" (UID: \"3d11f729-775d-4a25-8eb4-27757ae78469\") " pod="openstack/nova-api-0" Dec 08 21:45:09 crc kubenswrapper[4791]: I1208 21:45:09.312492 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mrfnm\" (UniqueName: \"kubernetes.io/projected/11583b6c-2a5f-4a4d-b4ec-b59d3dc32f5d-kube-api-access-mrfnm\") pod \"nova-cell1-conductor-0\" (UID: \"11583b6c-2a5f-4a4d-b4ec-b59d3dc32f5d\") " pod="openstack/nova-cell1-conductor-0" Dec 08 21:45:09 crc kubenswrapper[4791]: I1208 21:45:09.312664 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d11f729-775d-4a25-8eb4-27757ae78469-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3d11f729-775d-4a25-8eb4-27757ae78469\") " pod="openstack/nova-api-0" Dec 08 21:45:09 crc kubenswrapper[4791]: I1208 21:45:09.414719 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mrfnm\" (UniqueName: \"kubernetes.io/projected/11583b6c-2a5f-4a4d-b4ec-b59d3dc32f5d-kube-api-access-mrfnm\") pod \"nova-cell1-conductor-0\" (UID: \"11583b6c-2a5f-4a4d-b4ec-b59d3dc32f5d\") " pod="openstack/nova-cell1-conductor-0" Dec 08 21:45:09 crc kubenswrapper[4791]: I1208 21:45:09.414907 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d11f729-775d-4a25-8eb4-27757ae78469-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3d11f729-775d-4a25-8eb4-27757ae78469\") " pod="openstack/nova-api-0" Dec 08 21:45:09 crc kubenswrapper[4791]: I1208 21:45:09.414958 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11583b6c-2a5f-4a4d-b4ec-b59d3dc32f5d-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"11583b6c-2a5f-4a4d-b4ec-b59d3dc32f5d\") " pod="openstack/nova-cell1-conductor-0" Dec 08 21:45:09 crc kubenswrapper[4791]: I1208 21:45:09.415003 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d11f729-775d-4a25-8eb4-27757ae78469-config-data\") pod \"nova-api-0\" (UID: \"3d11f729-775d-4a25-8eb4-27757ae78469\") " pod="openstack/nova-api-0" Dec 08 21:45:09 crc kubenswrapper[4791]: I1208 21:45:09.415035 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11583b6c-2a5f-4a4d-b4ec-b59d3dc32f5d-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"11583b6c-2a5f-4a4d-b4ec-b59d3dc32f5d\") " pod="openstack/nova-cell1-conductor-0" Dec 08 21:45:09 crc kubenswrapper[4791]: I1208 21:45:09.415083 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b6rrj\" (UniqueName: \"kubernetes.io/projected/3d11f729-775d-4a25-8eb4-27757ae78469-kube-api-access-b6rrj\") pod \"nova-api-0\" (UID: \"3d11f729-775d-4a25-8eb4-27757ae78469\") " pod="openstack/nova-api-0" Dec 08 21:45:09 crc kubenswrapper[4791]: I1208 21:45:09.415103 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d11f729-775d-4a25-8eb4-27757ae78469-logs\") pod \"nova-api-0\" (UID: 
\"3d11f729-775d-4a25-8eb4-27757ae78469\") " pod="openstack/nova-api-0" Dec 08 21:45:09 crc kubenswrapper[4791]: I1208 21:45:09.415579 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d11f729-775d-4a25-8eb4-27757ae78469-logs\") pod \"nova-api-0\" (UID: \"3d11f729-775d-4a25-8eb4-27757ae78469\") " pod="openstack/nova-api-0" Dec 08 21:45:09 crc kubenswrapper[4791]: I1208 21:45:09.420012 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11583b6c-2a5f-4a4d-b4ec-b59d3dc32f5d-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"11583b6c-2a5f-4a4d-b4ec-b59d3dc32f5d\") " pod="openstack/nova-cell1-conductor-0" Dec 08 21:45:09 crc kubenswrapper[4791]: I1208 21:45:09.420515 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11583b6c-2a5f-4a4d-b4ec-b59d3dc32f5d-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"11583b6c-2a5f-4a4d-b4ec-b59d3dc32f5d\") " pod="openstack/nova-cell1-conductor-0" Dec 08 21:45:09 crc kubenswrapper[4791]: I1208 21:45:09.421228 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d11f729-775d-4a25-8eb4-27757ae78469-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3d11f729-775d-4a25-8eb4-27757ae78469\") " pod="openstack/nova-api-0" Dec 08 21:45:09 crc kubenswrapper[4791]: I1208 21:45:09.422563 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d11f729-775d-4a25-8eb4-27757ae78469-config-data\") pod \"nova-api-0\" (UID: \"3d11f729-775d-4a25-8eb4-27757ae78469\") " pod="openstack/nova-api-0" Dec 08 21:45:09 crc kubenswrapper[4791]: I1208 21:45:09.431943 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mrfnm\" (UniqueName: \"kubernetes.io/projected/11583b6c-2a5f-4a4d-b4ec-b59d3dc32f5d-kube-api-access-mrfnm\") pod \"nova-cell1-conductor-0\" (UID: \"11583b6c-2a5f-4a4d-b4ec-b59d3dc32f5d\") " pod="openstack/nova-cell1-conductor-0" Dec 08 21:45:09 crc kubenswrapper[4791]: I1208 21:45:09.448372 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b6rrj\" (UniqueName: \"kubernetes.io/projected/3d11f729-775d-4a25-8eb4-27757ae78469-kube-api-access-b6rrj\") pod \"nova-api-0\" (UID: \"3d11f729-775d-4a25-8eb4-27757ae78469\") " pod="openstack/nova-api-0" Dec 08 21:45:09 crc kubenswrapper[4791]: I1208 21:45:09.562194 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 08 21:45:09 crc kubenswrapper[4791]: I1208 21:45:09.592981 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 08 21:45:09 crc kubenswrapper[4791]: I1208 21:45:09.621239 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e204b013-c633-4311-a89a-0547f8cfc6e3" path="/var/lib/kubelet/pods/e204b013-c633-4311-a89a-0547f8cfc6e3/volumes" Dec 08 21:45:10 crc kubenswrapper[4791]: I1208 21:45:10.063624 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 08 21:45:10 crc kubenswrapper[4791]: I1208 21:45:10.146252 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3d11f729-775d-4a25-8eb4-27757ae78469","Type":"ContainerStarted","Data":"706bb88e2aec7955cbabc2130683dce4a0338bf079c658175619e82d00d2b68d"} Dec 08 21:45:10 crc kubenswrapper[4791]: W1208 21:45:10.170268 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod11583b6c_2a5f_4a4d_b4ec_b59d3dc32f5d.slice/crio-04e0f225de9800f7de0d049956aa3e60a292d0b8cd12f656aa743fe36ae29a79 WatchSource:0}: Error finding container 04e0f225de9800f7de0d049956aa3e60a292d0b8cd12f656aa743fe36ae29a79: Status 404 returned error can't find the container with id 04e0f225de9800f7de0d049956aa3e60a292d0b8cd12f656aa743fe36ae29a79 Dec 08 21:45:10 crc kubenswrapper[4791]: I1208 21:45:10.174011 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 08 21:45:10 crc kubenswrapper[4791]: I1208 21:45:10.479807 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 08 21:45:10 crc kubenswrapper[4791]: I1208 21:45:10.481598 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 08 21:45:10 crc kubenswrapper[4791]: I1208 21:45:10.634210 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 08 21:45:11 crc kubenswrapper[4791]: I1208 21:45:11.157124 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3d11f729-775d-4a25-8eb4-27757ae78469","Type":"ContainerStarted","Data":"32c9d87e0654831fa811f9d5c619b5aa63975b6ceb20264c2954cfcba27c254b"} Dec 08 21:45:11 crc kubenswrapper[4791]: I1208 21:45:11.157517 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3d11f729-775d-4a25-8eb4-27757ae78469","Type":"ContainerStarted","Data":"451f0903daf1e33ed86d9d8effc2664ecbdc4820c70574369a893aaa520ced10"} Dec 08 21:45:11 crc kubenswrapper[4791]: I1208 21:45:11.160832 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"11583b6c-2a5f-4a4d-b4ec-b59d3dc32f5d","Type":"ContainerStarted","Data":"df2325dd1cbfbef73c68f501b7a1b9e304e8a63bb2ed691b2593557ff6edcca8"} Dec 08 21:45:11 crc kubenswrapper[4791]: I1208 21:45:11.160875 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"11583b6c-2a5f-4a4d-b4ec-b59d3dc32f5d","Type":"ContainerStarted","Data":"04e0f225de9800f7de0d049956aa3e60a292d0b8cd12f656aa743fe36ae29a79"} Dec 08 21:45:11 crc kubenswrapper[4791]: I1208 21:45:11.160969 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Dec 08 21:45:11 crc kubenswrapper[4791]: I1208 21:45:11.176561 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.17654065 
podStartE2EDuration="2.17654065s" podCreationTimestamp="2025-12-08 21:45:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:45:11.17167411 +0000 UTC m=+1587.870432455" watchObservedRunningTime="2025-12-08 21:45:11.17654065 +0000 UTC m=+1587.875298995" Dec 08 21:45:11 crc kubenswrapper[4791]: I1208 21:45:11.196459 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.196441499 podStartE2EDuration="2.196441499s" podCreationTimestamp="2025-12-08 21:45:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:45:11.184367882 +0000 UTC m=+1587.883126237" watchObservedRunningTime="2025-12-08 21:45:11.196441499 +0000 UTC m=+1587.895199844" Dec 08 21:45:15 crc kubenswrapper[4791]: I1208 21:45:15.480566 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 08 21:45:15 crc kubenswrapper[4791]: I1208 21:45:15.481066 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 08 21:45:15 crc kubenswrapper[4791]: I1208 21:45:15.634210 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 08 21:45:15 crc kubenswrapper[4791]: I1208 21:45:15.677516 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 08 21:45:16 crc kubenswrapper[4791]: I1208 21:45:16.036962 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-84b7f8ffb-qdgth" Dec 08 21:45:16 crc kubenswrapper[4791]: I1208 21:45:16.119390 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51be1224-1eae-451d-9bac-6ab628fded83-combined-ca-bundle\") pod \"51be1224-1eae-451d-9bac-6ab628fded83\" (UID: \"51be1224-1eae-451d-9bac-6ab628fded83\") " Dec 08 21:45:16 crc kubenswrapper[4791]: I1208 21:45:16.119654 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/51be1224-1eae-451d-9bac-6ab628fded83-config-data-custom\") pod \"51be1224-1eae-451d-9bac-6ab628fded83\" (UID: \"51be1224-1eae-451d-9bac-6ab628fded83\") " Dec 08 21:45:16 crc kubenswrapper[4791]: I1208 21:45:16.119692 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51be1224-1eae-451d-9bac-6ab628fded83-config-data\") pod \"51be1224-1eae-451d-9bac-6ab628fded83\" (UID: \"51be1224-1eae-451d-9bac-6ab628fded83\") " Dec 08 21:45:16 crc kubenswrapper[4791]: I1208 21:45:16.119767 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bdt6k\" (UniqueName: \"kubernetes.io/projected/51be1224-1eae-451d-9bac-6ab628fded83-kube-api-access-bdt6k\") pod \"51be1224-1eae-451d-9bac-6ab628fded83\" (UID: \"51be1224-1eae-451d-9bac-6ab628fded83\") " Dec 08 21:45:16 crc kubenswrapper[4791]: I1208 21:45:16.151019 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51be1224-1eae-451d-9bac-6ab628fded83-kube-api-access-bdt6k" (OuterVolumeSpecName: "kube-api-access-bdt6k") pod "51be1224-1eae-451d-9bac-6ab628fded83" (UID: 
"51be1224-1eae-451d-9bac-6ab628fded83"). InnerVolumeSpecName "kube-api-access-bdt6k". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:45:16 crc kubenswrapper[4791]: I1208 21:45:16.151140 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51be1224-1eae-451d-9bac-6ab628fded83-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "51be1224-1eae-451d-9bac-6ab628fded83" (UID: "51be1224-1eae-451d-9bac-6ab628fded83"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:45:16 crc kubenswrapper[4791]: I1208 21:45:16.159535 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51be1224-1eae-451d-9bac-6ab628fded83-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "51be1224-1eae-451d-9bac-6ab628fded83" (UID: "51be1224-1eae-451d-9bac-6ab628fded83"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:45:16 crc kubenswrapper[4791]: I1208 21:45:16.204532 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51be1224-1eae-451d-9bac-6ab628fded83-config-data" (OuterVolumeSpecName: "config-data") pod "51be1224-1eae-451d-9bac-6ab628fded83" (UID: "51be1224-1eae-451d-9bac-6ab628fded83"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:45:16 crc kubenswrapper[4791]: I1208 21:45:16.220556 4791 generic.go:334] "Generic (PLEG): container finished" podID="51be1224-1eae-451d-9bac-6ab628fded83" containerID="b8c35b3d17975f605c4966dd5979ccb3a1b526daad6e2abe4e44e58786608a8e" exitCode=137 Dec 08 21:45:16 crc kubenswrapper[4791]: I1208 21:45:16.220604 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-api-84b7f8ffb-qdgth" Dec 08 21:45:16 crc kubenswrapper[4791]: I1208 21:45:16.220629 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-84b7f8ffb-qdgth" event={"ID":"51be1224-1eae-451d-9bac-6ab628fded83","Type":"ContainerDied","Data":"b8c35b3d17975f605c4966dd5979ccb3a1b526daad6e2abe4e44e58786608a8e"} Dec 08 21:45:16 crc kubenswrapper[4791]: I1208 21:45:16.221569 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-84b7f8ffb-qdgth" event={"ID":"51be1224-1eae-451d-9bac-6ab628fded83","Type":"ContainerDied","Data":"e725f2954f34894ba3390aafc25120b07b2e20c3d5bd9da826758e696ad59d52"} Dec 08 21:45:16 crc kubenswrapper[4791]: I1208 21:45:16.221674 4791 scope.go:117] "RemoveContainer" containerID="b8c35b3d17975f605c4966dd5979ccb3a1b526daad6e2abe4e44e58786608a8e" Dec 08 21:45:16 crc kubenswrapper[4791]: I1208 21:45:16.223248 4791 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51be1224-1eae-451d-9bac-6ab628fded83-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:16 crc kubenswrapper[4791]: I1208 21:45:16.223280 4791 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/51be1224-1eae-451d-9bac-6ab628fded83-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:16 crc kubenswrapper[4791]: I1208 21:45:16.223293 4791 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51be1224-1eae-451d-9bac-6ab628fded83-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:16 crc kubenswrapper[4791]: I1208 21:45:16.223306 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bdt6k\" (UniqueName: \"kubernetes.io/projected/51be1224-1eae-451d-9bac-6ab628fded83-kube-api-access-bdt6k\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:16 crc kubenswrapper[4791]: I1208 21:45:16.275495 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 08 21:45:16 crc kubenswrapper[4791]: I1208 21:45:16.277406 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-api-84b7f8ffb-qdgth"] Dec 08 21:45:16 crc kubenswrapper[4791]: I1208 21:45:16.290409 4791 scope.go:117] "RemoveContainer" containerID="b8c35b3d17975f605c4966dd5979ccb3a1b526daad6e2abe4e44e58786608a8e" Dec 08 21:45:16 crc kubenswrapper[4791]: E1208 21:45:16.290950 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b8c35b3d17975f605c4966dd5979ccb3a1b526daad6e2abe4e44e58786608a8e\": container with ID starting with b8c35b3d17975f605c4966dd5979ccb3a1b526daad6e2abe4e44e58786608a8e not found: ID does not exist" containerID="b8c35b3d17975f605c4966dd5979ccb3a1b526daad6e2abe4e44e58786608a8e" Dec 08 21:45:16 crc kubenswrapper[4791]: I1208 21:45:16.291019 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b8c35b3d17975f605c4966dd5979ccb3a1b526daad6e2abe4e44e58786608a8e"} err="failed to get container status \"b8c35b3d17975f605c4966dd5979ccb3a1b526daad6e2abe4e44e58786608a8e\": rpc error: code = NotFound desc = could not find container \"b8c35b3d17975f605c4966dd5979ccb3a1b526daad6e2abe4e44e58786608a8e\": container with ID starting with b8c35b3d17975f605c4966dd5979ccb3a1b526daad6e2abe4e44e58786608a8e not found: ID does not exist" Dec 08 21:45:16 crc 
kubenswrapper[4791]: I1208 21:45:16.293861 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-api-84b7f8ffb-qdgth"] Dec 08 21:45:16 crc kubenswrapper[4791]: I1208 21:45:16.494933 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="4aecb1a7-b3e4-40b9-baa9-3daa133d63f3" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.224:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 08 21:45:16 crc kubenswrapper[4791]: I1208 21:45:16.495010 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="4aecb1a7-b3e4-40b9-baa9-3daa133d63f3" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.224:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 08 21:45:17 crc kubenswrapper[4791]: I1208 21:45:17.621311 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="51be1224-1eae-451d-9bac-6ab628fded83" path="/var/lib/kubelet/pods/51be1224-1eae-451d-9bac-6ab628fded83/volumes" Dec 08 21:45:19 crc kubenswrapper[4791]: I1208 21:45:19.563463 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 08 21:45:19 crc kubenswrapper[4791]: I1208 21:45:19.563796 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 08 21:45:19 crc kubenswrapper[4791]: I1208 21:45:19.621509 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Dec 08 21:45:20 crc kubenswrapper[4791]: I1208 21:45:20.646881 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="3d11f729-775d-4a25-8eb4-27757ae78469" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.226:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 08 21:45:20 crc kubenswrapper[4791]: I1208 21:45:20.646977 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="3d11f729-775d-4a25-8eb4-27757ae78469" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.226:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 08 21:45:25 crc kubenswrapper[4791]: I1208 21:45:25.485995 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 08 21:45:25 crc kubenswrapper[4791]: I1208 21:45:25.487930 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 08 21:45:25 crc kubenswrapper[4791]: I1208 21:45:25.493549 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 08 21:45:26 crc kubenswrapper[4791]: I1208 21:45:26.337900 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 08 21:45:27 crc kubenswrapper[4791]: I1208 21:45:27.306252 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:45:27 crc kubenswrapper[4791]: I1208 21:45:27.346699 4791 generic.go:334] "Generic (PLEG): container finished" podID="74378ef2-518b-4974-bf29-33b29b6ab17d" containerID="c7f46c3042c28659e655069923bec570d104eae28dad8ae40063e40b753e8000" exitCode=137 Dec 08 21:45:27 crc kubenswrapper[4791]: I1208 21:45:27.347988 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:45:27 crc kubenswrapper[4791]: I1208 21:45:27.348142 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"74378ef2-518b-4974-bf29-33b29b6ab17d","Type":"ContainerDied","Data":"c7f46c3042c28659e655069923bec570d104eae28dad8ae40063e40b753e8000"} Dec 08 21:45:27 crc kubenswrapper[4791]: I1208 21:45:27.348178 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"74378ef2-518b-4974-bf29-33b29b6ab17d","Type":"ContainerDied","Data":"a4ae6e8dee4e0e378d94c80d039b24e7dd8b6d0055b1a1016d3faaacc83ebed0"} Dec 08 21:45:27 crc kubenswrapper[4791]: I1208 21:45:27.348204 4791 scope.go:117] "RemoveContainer" containerID="c7f46c3042c28659e655069923bec570d104eae28dad8ae40063e40b753e8000" Dec 08 21:45:27 crc kubenswrapper[4791]: I1208 21:45:27.381636 4791 scope.go:117] "RemoveContainer" containerID="c7f46c3042c28659e655069923bec570d104eae28dad8ae40063e40b753e8000" Dec 08 21:45:27 crc kubenswrapper[4791]: E1208 21:45:27.384550 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c7f46c3042c28659e655069923bec570d104eae28dad8ae40063e40b753e8000\": container with ID starting with c7f46c3042c28659e655069923bec570d104eae28dad8ae40063e40b753e8000 not found: ID does not exist" containerID="c7f46c3042c28659e655069923bec570d104eae28dad8ae40063e40b753e8000" Dec 08 21:45:27 crc kubenswrapper[4791]: I1208 21:45:27.384611 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c7f46c3042c28659e655069923bec570d104eae28dad8ae40063e40b753e8000"} err="failed to get container status \"c7f46c3042c28659e655069923bec570d104eae28dad8ae40063e40b753e8000\": rpc error: code = NotFound desc = could not find container \"c7f46c3042c28659e655069923bec570d104eae28dad8ae40063e40b753e8000\": container with ID starting with c7f46c3042c28659e655069923bec570d104eae28dad8ae40063e40b753e8000 not found: ID does not exist" Dec 08 21:45:27 crc kubenswrapper[4791]: I1208 21:45:27.433975 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74378ef2-518b-4974-bf29-33b29b6ab17d-combined-ca-bundle\") pod \"74378ef2-518b-4974-bf29-33b29b6ab17d\" (UID: \"74378ef2-518b-4974-bf29-33b29b6ab17d\") " Dec 08 21:45:27 crc kubenswrapper[4791]: I1208 21:45:27.434168 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rghxt\" (UniqueName: \"kubernetes.io/projected/74378ef2-518b-4974-bf29-33b29b6ab17d-kube-api-access-rghxt\") pod \"74378ef2-518b-4974-bf29-33b29b6ab17d\" (UID: \"74378ef2-518b-4974-bf29-33b29b6ab17d\") " Dec 08 21:45:27 crc kubenswrapper[4791]: I1208 21:45:27.434200 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74378ef2-518b-4974-bf29-33b29b6ab17d-config-data\") pod 
\"74378ef2-518b-4974-bf29-33b29b6ab17d\" (UID: \"74378ef2-518b-4974-bf29-33b29b6ab17d\") " Dec 08 21:45:27 crc kubenswrapper[4791]: I1208 21:45:27.450087 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74378ef2-518b-4974-bf29-33b29b6ab17d-kube-api-access-rghxt" (OuterVolumeSpecName: "kube-api-access-rghxt") pod "74378ef2-518b-4974-bf29-33b29b6ab17d" (UID: "74378ef2-518b-4974-bf29-33b29b6ab17d"). InnerVolumeSpecName "kube-api-access-rghxt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:45:27 crc kubenswrapper[4791]: I1208 21:45:27.465261 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74378ef2-518b-4974-bf29-33b29b6ab17d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "74378ef2-518b-4974-bf29-33b29b6ab17d" (UID: "74378ef2-518b-4974-bf29-33b29b6ab17d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:45:27 crc kubenswrapper[4791]: I1208 21:45:27.475653 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74378ef2-518b-4974-bf29-33b29b6ab17d-config-data" (OuterVolumeSpecName: "config-data") pod "74378ef2-518b-4974-bf29-33b29b6ab17d" (UID: "74378ef2-518b-4974-bf29-33b29b6ab17d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:45:27 crc kubenswrapper[4791]: I1208 21:45:27.538354 4791 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74378ef2-518b-4974-bf29-33b29b6ab17d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:27 crc kubenswrapper[4791]: I1208 21:45:27.538404 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rghxt\" (UniqueName: \"kubernetes.io/projected/74378ef2-518b-4974-bf29-33b29b6ab17d-kube-api-access-rghxt\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:27 crc kubenswrapper[4791]: I1208 21:45:27.538418 4791 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74378ef2-518b-4974-bf29-33b29b6ab17d-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:27 crc kubenswrapper[4791]: I1208 21:45:27.714880 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 08 21:45:27 crc kubenswrapper[4791]: I1208 21:45:27.737583 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 08 21:45:27 crc kubenswrapper[4791]: I1208 21:45:27.746683 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 08 21:45:27 crc kubenswrapper[4791]: E1208 21:45:27.747312 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51be1224-1eae-451d-9bac-6ab628fded83" containerName="heat-api" Dec 08 21:45:27 crc kubenswrapper[4791]: I1208 21:45:27.747334 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="51be1224-1eae-451d-9bac-6ab628fded83" containerName="heat-api" Dec 08 21:45:27 crc kubenswrapper[4791]: E1208 21:45:27.747362 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74378ef2-518b-4974-bf29-33b29b6ab17d" containerName="nova-cell1-novncproxy-novncproxy" Dec 08 21:45:27 crc kubenswrapper[4791]: I1208 21:45:27.747371 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="74378ef2-518b-4974-bf29-33b29b6ab17d" containerName="nova-cell1-novncproxy-novncproxy" Dec 08 21:45:27 crc 
kubenswrapper[4791]: I1208 21:45:27.747727 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="74378ef2-518b-4974-bf29-33b29b6ab17d" containerName="nova-cell1-novncproxy-novncproxy" Dec 08 21:45:27 crc kubenswrapper[4791]: I1208 21:45:27.747768 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="51be1224-1eae-451d-9bac-6ab628fded83" containerName="heat-api" Dec 08 21:45:27 crc kubenswrapper[4791]: I1208 21:45:27.748900 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:45:27 crc kubenswrapper[4791]: I1208 21:45:27.752877 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 08 21:45:27 crc kubenswrapper[4791]: I1208 21:45:27.752961 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Dec 08 21:45:27 crc kubenswrapper[4791]: I1208 21:45:27.754379 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Dec 08 21:45:27 crc kubenswrapper[4791]: I1208 21:45:27.762791 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 08 21:45:27 crc kubenswrapper[4791]: I1208 21:45:27.967735 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b6dd596-2078-462a-9c8c-5694cf4e1f9d-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"8b6dd596-2078-462a-9c8c-5694cf4e1f9d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:45:27 crc kubenswrapper[4791]: I1208 21:45:27.968094 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lmwg5\" (UniqueName: \"kubernetes.io/projected/8b6dd596-2078-462a-9c8c-5694cf4e1f9d-kube-api-access-lmwg5\") pod \"nova-cell1-novncproxy-0\" (UID: \"8b6dd596-2078-462a-9c8c-5694cf4e1f9d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:45:27 crc kubenswrapper[4791]: I1208 21:45:27.968123 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b6dd596-2078-462a-9c8c-5694cf4e1f9d-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"8b6dd596-2078-462a-9c8c-5694cf4e1f9d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:45:27 crc kubenswrapper[4791]: I1208 21:45:27.968160 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b6dd596-2078-462a-9c8c-5694cf4e1f9d-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"8b6dd596-2078-462a-9c8c-5694cf4e1f9d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:45:27 crc kubenswrapper[4791]: I1208 21:45:27.968311 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b6dd596-2078-462a-9c8c-5694cf4e1f9d-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"8b6dd596-2078-462a-9c8c-5694cf4e1f9d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:45:28 crc kubenswrapper[4791]: I1208 21:45:28.070388 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/8b6dd596-2078-462a-9c8c-5694cf4e1f9d-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"8b6dd596-2078-462a-9c8c-5694cf4e1f9d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:45:28 crc kubenswrapper[4791]: I1208 21:45:28.070458 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lmwg5\" (UniqueName: \"kubernetes.io/projected/8b6dd596-2078-462a-9c8c-5694cf4e1f9d-kube-api-access-lmwg5\") pod \"nova-cell1-novncproxy-0\" (UID: \"8b6dd596-2078-462a-9c8c-5694cf4e1f9d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:45:28 crc kubenswrapper[4791]: I1208 21:45:28.070492 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b6dd596-2078-462a-9c8c-5694cf4e1f9d-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"8b6dd596-2078-462a-9c8c-5694cf4e1f9d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:45:28 crc kubenswrapper[4791]: I1208 21:45:28.070532 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b6dd596-2078-462a-9c8c-5694cf4e1f9d-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"8b6dd596-2078-462a-9c8c-5694cf4e1f9d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:45:28 crc kubenswrapper[4791]: I1208 21:45:28.070792 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b6dd596-2078-462a-9c8c-5694cf4e1f9d-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"8b6dd596-2078-462a-9c8c-5694cf4e1f9d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:45:28 crc kubenswrapper[4791]: I1208 21:45:28.075857 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b6dd596-2078-462a-9c8c-5694cf4e1f9d-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"8b6dd596-2078-462a-9c8c-5694cf4e1f9d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:45:28 crc kubenswrapper[4791]: I1208 21:45:28.076644 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b6dd596-2078-462a-9c8c-5694cf4e1f9d-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"8b6dd596-2078-462a-9c8c-5694cf4e1f9d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:45:28 crc kubenswrapper[4791]: I1208 21:45:28.077472 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b6dd596-2078-462a-9c8c-5694cf4e1f9d-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"8b6dd596-2078-462a-9c8c-5694cf4e1f9d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:45:28 crc kubenswrapper[4791]: I1208 21:45:28.078403 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b6dd596-2078-462a-9c8c-5694cf4e1f9d-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"8b6dd596-2078-462a-9c8c-5694cf4e1f9d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:45:28 crc kubenswrapper[4791]: I1208 21:45:28.088992 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lmwg5\" (UniqueName: \"kubernetes.io/projected/8b6dd596-2078-462a-9c8c-5694cf4e1f9d-kube-api-access-lmwg5\") pod \"nova-cell1-novncproxy-0\" (UID: 
\"8b6dd596-2078-462a-9c8c-5694cf4e1f9d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:45:28 crc kubenswrapper[4791]: I1208 21:45:28.371604 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:45:28 crc kubenswrapper[4791]: I1208 21:45:28.848180 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 08 21:45:28 crc kubenswrapper[4791]: W1208 21:45:28.855467 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8b6dd596_2078_462a_9c8c_5694cf4e1f9d.slice/crio-c3d7023feee33eca4d7df8cd66b20fda01c974bd1689ee331cb83393a8dad406 WatchSource:0}: Error finding container c3d7023feee33eca4d7df8cd66b20fda01c974bd1689ee331cb83393a8dad406: Status 404 returned error can't find the container with id c3d7023feee33eca4d7df8cd66b20fda01c974bd1689ee331cb83393a8dad406 Dec 08 21:45:29 crc kubenswrapper[4791]: I1208 21:45:29.370890 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"8b6dd596-2078-462a-9c8c-5694cf4e1f9d","Type":"ContainerStarted","Data":"7929ff7a41a0e420e00e1c037335835afbc8cf71cae1925abcd825ddc9fb8093"} Dec 08 21:45:29 crc kubenswrapper[4791]: I1208 21:45:29.370941 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"8b6dd596-2078-462a-9c8c-5694cf4e1f9d","Type":"ContainerStarted","Data":"c3d7023feee33eca4d7df8cd66b20fda01c974bd1689ee331cb83393a8dad406"} Dec 08 21:45:29 crc kubenswrapper[4791]: I1208 21:45:29.402118 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.402098767 podStartE2EDuration="2.402098767s" podCreationTimestamp="2025-12-08 21:45:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:45:29.389921038 +0000 UTC m=+1606.088679393" watchObservedRunningTime="2025-12-08 21:45:29.402098767 +0000 UTC m=+1606.100857112" Dec 08 21:45:29 crc kubenswrapper[4791]: I1208 21:45:29.567751 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 08 21:45:29 crc kubenswrapper[4791]: I1208 21:45:29.568478 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 08 21:45:29 crc kubenswrapper[4791]: I1208 21:45:29.568814 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 08 21:45:29 crc kubenswrapper[4791]: I1208 21:45:29.568835 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 08 21:45:29 crc kubenswrapper[4791]: I1208 21:45:29.572756 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 08 21:45:29 crc kubenswrapper[4791]: I1208 21:45:29.572935 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 08 21:45:29 crc kubenswrapper[4791]: I1208 21:45:29.612680 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="74378ef2-518b-4974-bf29-33b29b6ab17d" path="/var/lib/kubelet/pods/74378ef2-518b-4974-bf29-33b29b6ab17d/volumes" Dec 08 21:45:29 crc kubenswrapper[4791]: I1208 21:45:29.763005 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-f84f9ccf-tdxxg"] Dec 08 21:45:29 crc 
kubenswrapper[4791]: I1208 21:45:29.765142 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f84f9ccf-tdxxg" Dec 08 21:45:29 crc kubenswrapper[4791]: I1208 21:45:29.782257 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-f84f9ccf-tdxxg"] Dec 08 21:45:29 crc kubenswrapper[4791]: I1208 21:45:29.853354 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k9c49\" (UniqueName: \"kubernetes.io/projected/974e6ad2-a5d0-4a35-b88a-a72fea48b754-kube-api-access-k9c49\") pod \"dnsmasq-dns-f84f9ccf-tdxxg\" (UID: \"974e6ad2-a5d0-4a35-b88a-a72fea48b754\") " pod="openstack/dnsmasq-dns-f84f9ccf-tdxxg" Dec 08 21:45:29 crc kubenswrapper[4791]: I1208 21:45:29.853432 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/974e6ad2-a5d0-4a35-b88a-a72fea48b754-config\") pod \"dnsmasq-dns-f84f9ccf-tdxxg\" (UID: \"974e6ad2-a5d0-4a35-b88a-a72fea48b754\") " pod="openstack/dnsmasq-dns-f84f9ccf-tdxxg" Dec 08 21:45:29 crc kubenswrapper[4791]: I1208 21:45:29.853641 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/974e6ad2-a5d0-4a35-b88a-a72fea48b754-dns-swift-storage-0\") pod \"dnsmasq-dns-f84f9ccf-tdxxg\" (UID: \"974e6ad2-a5d0-4a35-b88a-a72fea48b754\") " pod="openstack/dnsmasq-dns-f84f9ccf-tdxxg" Dec 08 21:45:29 crc kubenswrapper[4791]: I1208 21:45:29.853761 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/974e6ad2-a5d0-4a35-b88a-a72fea48b754-ovsdbserver-nb\") pod \"dnsmasq-dns-f84f9ccf-tdxxg\" (UID: \"974e6ad2-a5d0-4a35-b88a-a72fea48b754\") " pod="openstack/dnsmasq-dns-f84f9ccf-tdxxg" Dec 08 21:45:29 crc kubenswrapper[4791]: I1208 21:45:29.853827 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/974e6ad2-a5d0-4a35-b88a-a72fea48b754-dns-svc\") pod \"dnsmasq-dns-f84f9ccf-tdxxg\" (UID: \"974e6ad2-a5d0-4a35-b88a-a72fea48b754\") " pod="openstack/dnsmasq-dns-f84f9ccf-tdxxg" Dec 08 21:45:29 crc kubenswrapper[4791]: I1208 21:45:29.853956 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/974e6ad2-a5d0-4a35-b88a-a72fea48b754-ovsdbserver-sb\") pod \"dnsmasq-dns-f84f9ccf-tdxxg\" (UID: \"974e6ad2-a5d0-4a35-b88a-a72fea48b754\") " pod="openstack/dnsmasq-dns-f84f9ccf-tdxxg" Dec 08 21:45:29 crc kubenswrapper[4791]: I1208 21:45:29.957493 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/974e6ad2-a5d0-4a35-b88a-a72fea48b754-dns-swift-storage-0\") pod \"dnsmasq-dns-f84f9ccf-tdxxg\" (UID: \"974e6ad2-a5d0-4a35-b88a-a72fea48b754\") " pod="openstack/dnsmasq-dns-f84f9ccf-tdxxg" Dec 08 21:45:29 crc kubenswrapper[4791]: I1208 21:45:29.957594 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/974e6ad2-a5d0-4a35-b88a-a72fea48b754-ovsdbserver-nb\") pod \"dnsmasq-dns-f84f9ccf-tdxxg\" (UID: \"974e6ad2-a5d0-4a35-b88a-a72fea48b754\") " pod="openstack/dnsmasq-dns-f84f9ccf-tdxxg" Dec 08 21:45:29 crc 
kubenswrapper[4791]: I1208 21:45:29.957629 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/974e6ad2-a5d0-4a35-b88a-a72fea48b754-dns-svc\") pod \"dnsmasq-dns-f84f9ccf-tdxxg\" (UID: \"974e6ad2-a5d0-4a35-b88a-a72fea48b754\") " pod="openstack/dnsmasq-dns-f84f9ccf-tdxxg" Dec 08 21:45:29 crc kubenswrapper[4791]: I1208 21:45:29.957696 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/974e6ad2-a5d0-4a35-b88a-a72fea48b754-ovsdbserver-sb\") pod \"dnsmasq-dns-f84f9ccf-tdxxg\" (UID: \"974e6ad2-a5d0-4a35-b88a-a72fea48b754\") " pod="openstack/dnsmasq-dns-f84f9ccf-tdxxg" Dec 08 21:45:29 crc kubenswrapper[4791]: I1208 21:45:29.957764 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k9c49\" (UniqueName: \"kubernetes.io/projected/974e6ad2-a5d0-4a35-b88a-a72fea48b754-kube-api-access-k9c49\") pod \"dnsmasq-dns-f84f9ccf-tdxxg\" (UID: \"974e6ad2-a5d0-4a35-b88a-a72fea48b754\") " pod="openstack/dnsmasq-dns-f84f9ccf-tdxxg" Dec 08 21:45:29 crc kubenswrapper[4791]: I1208 21:45:29.957808 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/974e6ad2-a5d0-4a35-b88a-a72fea48b754-config\") pod \"dnsmasq-dns-f84f9ccf-tdxxg\" (UID: \"974e6ad2-a5d0-4a35-b88a-a72fea48b754\") " pod="openstack/dnsmasq-dns-f84f9ccf-tdxxg" Dec 08 21:45:29 crc kubenswrapper[4791]: I1208 21:45:29.958796 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/974e6ad2-a5d0-4a35-b88a-a72fea48b754-config\") pod \"dnsmasq-dns-f84f9ccf-tdxxg\" (UID: \"974e6ad2-a5d0-4a35-b88a-a72fea48b754\") " pod="openstack/dnsmasq-dns-f84f9ccf-tdxxg" Dec 08 21:45:29 crc kubenswrapper[4791]: I1208 21:45:29.959114 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/974e6ad2-a5d0-4a35-b88a-a72fea48b754-dns-svc\") pod \"dnsmasq-dns-f84f9ccf-tdxxg\" (UID: \"974e6ad2-a5d0-4a35-b88a-a72fea48b754\") " pod="openstack/dnsmasq-dns-f84f9ccf-tdxxg" Dec 08 21:45:29 crc kubenswrapper[4791]: I1208 21:45:29.959321 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/974e6ad2-a5d0-4a35-b88a-a72fea48b754-ovsdbserver-sb\") pod \"dnsmasq-dns-f84f9ccf-tdxxg\" (UID: \"974e6ad2-a5d0-4a35-b88a-a72fea48b754\") " pod="openstack/dnsmasq-dns-f84f9ccf-tdxxg" Dec 08 21:45:29 crc kubenswrapper[4791]: I1208 21:45:29.959846 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/974e6ad2-a5d0-4a35-b88a-a72fea48b754-dns-swift-storage-0\") pod \"dnsmasq-dns-f84f9ccf-tdxxg\" (UID: \"974e6ad2-a5d0-4a35-b88a-a72fea48b754\") " pod="openstack/dnsmasq-dns-f84f9ccf-tdxxg" Dec 08 21:45:29 crc kubenswrapper[4791]: I1208 21:45:29.960099 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/974e6ad2-a5d0-4a35-b88a-a72fea48b754-ovsdbserver-nb\") pod \"dnsmasq-dns-f84f9ccf-tdxxg\" (UID: \"974e6ad2-a5d0-4a35-b88a-a72fea48b754\") " pod="openstack/dnsmasq-dns-f84f9ccf-tdxxg" Dec 08 21:45:29 crc kubenswrapper[4791]: I1208 21:45:29.980570 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k9c49\" (UniqueName: 
\"kubernetes.io/projected/974e6ad2-a5d0-4a35-b88a-a72fea48b754-kube-api-access-k9c49\") pod \"dnsmasq-dns-f84f9ccf-tdxxg\" (UID: \"974e6ad2-a5d0-4a35-b88a-a72fea48b754\") " pod="openstack/dnsmasq-dns-f84f9ccf-tdxxg" Dec 08 21:45:30 crc kubenswrapper[4791]: I1208 21:45:30.104077 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f84f9ccf-tdxxg" Dec 08 21:45:30 crc kubenswrapper[4791]: I1208 21:45:30.674337 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-f84f9ccf-tdxxg"] Dec 08 21:45:30 crc kubenswrapper[4791]: W1208 21:45:30.676258 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod974e6ad2_a5d0_4a35_b88a_a72fea48b754.slice/crio-05a2eaff5eaf824c5fa1ab66252067d91ac4226d10d9a3520167327d179229a3 WatchSource:0}: Error finding container 05a2eaff5eaf824c5fa1ab66252067d91ac4226d10d9a3520167327d179229a3: Status 404 returned error can't find the container with id 05a2eaff5eaf824c5fa1ab66252067d91ac4226d10d9a3520167327d179229a3 Dec 08 21:45:31 crc kubenswrapper[4791]: I1208 21:45:31.395183 4791 generic.go:334] "Generic (PLEG): container finished" podID="974e6ad2-a5d0-4a35-b88a-a72fea48b754" containerID="dcaa21893a95aab890a0ac8607bd9c3e8698740bf5314a0dea8c86db9501e80e" exitCode=0 Dec 08 21:45:31 crc kubenswrapper[4791]: I1208 21:45:31.395273 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f84f9ccf-tdxxg" event={"ID":"974e6ad2-a5d0-4a35-b88a-a72fea48b754","Type":"ContainerDied","Data":"dcaa21893a95aab890a0ac8607bd9c3e8698740bf5314a0dea8c86db9501e80e"} Dec 08 21:45:31 crc kubenswrapper[4791]: I1208 21:45:31.395317 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f84f9ccf-tdxxg" event={"ID":"974e6ad2-a5d0-4a35-b88a-a72fea48b754","Type":"ContainerStarted","Data":"05a2eaff5eaf824c5fa1ab66252067d91ac4226d10d9a3520167327d179229a3"} Dec 08 21:45:32 crc kubenswrapper[4791]: I1208 21:45:32.407762 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f84f9ccf-tdxxg" event={"ID":"974e6ad2-a5d0-4a35-b88a-a72fea48b754","Type":"ContainerStarted","Data":"8282200a7e776b621360b23f78eb77fba04ad2b757c564970efa60708a4fc919"} Dec 08 21:45:32 crc kubenswrapper[4791]: I1208 21:45:32.408310 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-f84f9ccf-tdxxg" Dec 08 21:45:32 crc kubenswrapper[4791]: I1208 21:45:32.447213 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-f84f9ccf-tdxxg" podStartSLOduration=3.447191692 podStartE2EDuration="3.447191692s" podCreationTimestamp="2025-12-08 21:45:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:45:32.431338472 +0000 UTC m=+1609.130096817" watchObservedRunningTime="2025-12-08 21:45:32.447191692 +0000 UTC m=+1609.145950037" Dec 08 21:45:32 crc kubenswrapper[4791]: I1208 21:45:32.483374 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 08 21:45:32 crc kubenswrapper[4791]: I1208 21:45:32.483853 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="3d11f729-775d-4a25-8eb4-27757ae78469" containerName="nova-api-api" containerID="cri-o://32c9d87e0654831fa811f9d5c619b5aa63975b6ceb20264c2954cfcba27c254b" gracePeriod=30 Dec 08 21:45:32 crc 
kubenswrapper[4791]: I1208 21:45:32.483838 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="3d11f729-775d-4a25-8eb4-27757ae78469" containerName="nova-api-log" containerID="cri-o://451f0903daf1e33ed86d9d8effc2664ecbdc4820c70574369a893aaa520ced10" gracePeriod=30 Dec 08 21:45:33 crc kubenswrapper[4791]: I1208 21:45:33.372285 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:45:33 crc kubenswrapper[4791]: I1208 21:45:33.419004 4791 generic.go:334] "Generic (PLEG): container finished" podID="3d11f729-775d-4a25-8eb4-27757ae78469" containerID="451f0903daf1e33ed86d9d8effc2664ecbdc4820c70574369a893aaa520ced10" exitCode=143 Dec 08 21:45:33 crc kubenswrapper[4791]: I1208 21:45:33.419097 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3d11f729-775d-4a25-8eb4-27757ae78469","Type":"ContainerDied","Data":"451f0903daf1e33ed86d9d8effc2664ecbdc4820c70574369a893aaa520ced10"} Dec 08 21:45:35 crc kubenswrapper[4791]: I1208 21:45:35.251212 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:45:35 crc kubenswrapper[4791]: I1208 21:45:35.251490 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:45:35 crc kubenswrapper[4791]: I1208 21:45:35.251539 4791 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" Dec 08 21:45:35 crc kubenswrapper[4791]: I1208 21:45:35.252609 4791 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"378e3a5a4f2c0073c5af7e3a66a8b9357a6d3b10e58f0a7f472dd39ed5f18e43"} pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 08 21:45:35 crc kubenswrapper[4791]: I1208 21:45:35.252667 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" containerID="cri-o://378e3a5a4f2c0073c5af7e3a66a8b9357a6d3b10e58f0a7f472dd39ed5f18e43" gracePeriod=600 Dec 08 21:45:35 crc kubenswrapper[4791]: E1208 21:45:35.391101 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 21:45:35 crc kubenswrapper[4791]: I1208 21:45:35.442814 4791 generic.go:334] "Generic (PLEG): container finished" podID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" 
containerID="378e3a5a4f2c0073c5af7e3a66a8b9357a6d3b10e58f0a7f472dd39ed5f18e43" exitCode=0 Dec 08 21:45:35 crc kubenswrapper[4791]: I1208 21:45:35.442861 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" event={"ID":"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d","Type":"ContainerDied","Data":"378e3a5a4f2c0073c5af7e3a66a8b9357a6d3b10e58f0a7f472dd39ed5f18e43"} Dec 08 21:45:35 crc kubenswrapper[4791]: I1208 21:45:35.442921 4791 scope.go:117] "RemoveContainer" containerID="294a71027908b890218887589647a3ead6eb10efa3b336cb826e5a4ab73343c7" Dec 08 21:45:35 crc kubenswrapper[4791]: I1208 21:45:35.444372 4791 scope.go:117] "RemoveContainer" containerID="378e3a5a4f2c0073c5af7e3a66a8b9357a6d3b10e58f0a7f472dd39ed5f18e43" Dec 08 21:45:35 crc kubenswrapper[4791]: E1208 21:45:35.445534 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.180820 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.239020 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b6rrj\" (UniqueName: \"kubernetes.io/projected/3d11f729-775d-4a25-8eb4-27757ae78469-kube-api-access-b6rrj\") pod \"3d11f729-775d-4a25-8eb4-27757ae78469\" (UID: \"3d11f729-775d-4a25-8eb4-27757ae78469\") " Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.239225 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d11f729-775d-4a25-8eb4-27757ae78469-logs\") pod \"3d11f729-775d-4a25-8eb4-27757ae78469\" (UID: \"3d11f729-775d-4a25-8eb4-27757ae78469\") " Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.239516 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d11f729-775d-4a25-8eb4-27757ae78469-combined-ca-bundle\") pod \"3d11f729-775d-4a25-8eb4-27757ae78469\" (UID: \"3d11f729-775d-4a25-8eb4-27757ae78469\") " Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.239613 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d11f729-775d-4a25-8eb4-27757ae78469-config-data\") pod \"3d11f729-775d-4a25-8eb4-27757ae78469\" (UID: \"3d11f729-775d-4a25-8eb4-27757ae78469\") " Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.239870 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d11f729-775d-4a25-8eb4-27757ae78469-logs" (OuterVolumeSpecName: "logs") pod "3d11f729-775d-4a25-8eb4-27757ae78469" (UID: "3d11f729-775d-4a25-8eb4-27757ae78469"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.240336 4791 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d11f729-775d-4a25-8eb4-27757ae78469-logs\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.259056 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d11f729-775d-4a25-8eb4-27757ae78469-kube-api-access-b6rrj" (OuterVolumeSpecName: "kube-api-access-b6rrj") pod "3d11f729-775d-4a25-8eb4-27757ae78469" (UID: "3d11f729-775d-4a25-8eb4-27757ae78469"). InnerVolumeSpecName "kube-api-access-b6rrj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.275988 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d11f729-775d-4a25-8eb4-27757ae78469-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3d11f729-775d-4a25-8eb4-27757ae78469" (UID: "3d11f729-775d-4a25-8eb4-27757ae78469"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.280371 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d11f729-775d-4a25-8eb4-27757ae78469-config-data" (OuterVolumeSpecName: "config-data") pod "3d11f729-775d-4a25-8eb4-27757ae78469" (UID: "3d11f729-775d-4a25-8eb4-27757ae78469"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.343061 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b6rrj\" (UniqueName: \"kubernetes.io/projected/3d11f729-775d-4a25-8eb4-27757ae78469-kube-api-access-b6rrj\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.343327 4791 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d11f729-775d-4a25-8eb4-27757ae78469-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.343435 4791 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d11f729-775d-4a25-8eb4-27757ae78469-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.454976 4791 generic.go:334] "Generic (PLEG): container finished" podID="3d11f729-775d-4a25-8eb4-27757ae78469" containerID="32c9d87e0654831fa811f9d5c619b5aa63975b6ceb20264c2954cfcba27c254b" exitCode=0 Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.455039 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3d11f729-775d-4a25-8eb4-27757ae78469","Type":"ContainerDied","Data":"32c9d87e0654831fa811f9d5c619b5aa63975b6ceb20264c2954cfcba27c254b"} Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.455068 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3d11f729-775d-4a25-8eb4-27757ae78469","Type":"ContainerDied","Data":"706bb88e2aec7955cbabc2130683dce4a0338bf079c658175619e82d00d2b68d"} Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.455088 4791 scope.go:117] "RemoveContainer" containerID="32c9d87e0654831fa811f9d5c619b5aa63975b6ceb20264c2954cfcba27c254b" Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.455198 4791 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.479021 4791 scope.go:117] "RemoveContainer" containerID="451f0903daf1e33ed86d9d8effc2664ecbdc4820c70574369a893aaa520ced10" Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.501155 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.510606 4791 scope.go:117] "RemoveContainer" containerID="32c9d87e0654831fa811f9d5c619b5aa63975b6ceb20264c2954cfcba27c254b" Dec 08 21:45:36 crc kubenswrapper[4791]: E1208 21:45:36.511136 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"32c9d87e0654831fa811f9d5c619b5aa63975b6ceb20264c2954cfcba27c254b\": container with ID starting with 32c9d87e0654831fa811f9d5c619b5aa63975b6ceb20264c2954cfcba27c254b not found: ID does not exist" containerID="32c9d87e0654831fa811f9d5c619b5aa63975b6ceb20264c2954cfcba27c254b" Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.511190 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32c9d87e0654831fa811f9d5c619b5aa63975b6ceb20264c2954cfcba27c254b"} err="failed to get container status \"32c9d87e0654831fa811f9d5c619b5aa63975b6ceb20264c2954cfcba27c254b\": rpc error: code = NotFound desc = could not find container \"32c9d87e0654831fa811f9d5c619b5aa63975b6ceb20264c2954cfcba27c254b\": container with ID starting with 32c9d87e0654831fa811f9d5c619b5aa63975b6ceb20264c2954cfcba27c254b not found: ID does not exist" Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.511227 4791 scope.go:117] "RemoveContainer" containerID="451f0903daf1e33ed86d9d8effc2664ecbdc4820c70574369a893aaa520ced10" Dec 08 21:45:36 crc kubenswrapper[4791]: E1208 21:45:36.511532 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"451f0903daf1e33ed86d9d8effc2664ecbdc4820c70574369a893aaa520ced10\": container with ID starting with 451f0903daf1e33ed86d9d8effc2664ecbdc4820c70574369a893aaa520ced10 not found: ID does not exist" containerID="451f0903daf1e33ed86d9d8effc2664ecbdc4820c70574369a893aaa520ced10" Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.511568 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"451f0903daf1e33ed86d9d8effc2664ecbdc4820c70574369a893aaa520ced10"} err="failed to get container status \"451f0903daf1e33ed86d9d8effc2664ecbdc4820c70574369a893aaa520ced10\": rpc error: code = NotFound desc = could not find container \"451f0903daf1e33ed86d9d8effc2664ecbdc4820c70574369a893aaa520ced10\": container with ID starting with 451f0903daf1e33ed86d9d8effc2664ecbdc4820c70574369a893aaa520ced10 not found: ID does not exist" Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.514130 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.539529 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 08 21:45:36 crc kubenswrapper[4791]: E1208 21:45:36.540147 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d11f729-775d-4a25-8eb4-27757ae78469" containerName="nova-api-log" Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.540174 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d11f729-775d-4a25-8eb4-27757ae78469" 
containerName="nova-api-log" Dec 08 21:45:36 crc kubenswrapper[4791]: E1208 21:45:36.540228 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d11f729-775d-4a25-8eb4-27757ae78469" containerName="nova-api-api" Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.540237 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d11f729-775d-4a25-8eb4-27757ae78469" containerName="nova-api-api" Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.540579 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d11f729-775d-4a25-8eb4-27757ae78469" containerName="nova-api-api" Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.540613 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d11f729-775d-4a25-8eb4-27757ae78469" containerName="nova-api-log" Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.542304 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.544858 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.545148 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.547329 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.560162 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.649328 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/361b4085-e6a8-45c8-b172-fe0fc321ca17-public-tls-certs\") pod \"nova-api-0\" (UID: \"361b4085-e6a8-45c8-b172-fe0fc321ca17\") " pod="openstack/nova-api-0" Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.649989 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dhm72\" (UniqueName: \"kubernetes.io/projected/361b4085-e6a8-45c8-b172-fe0fc321ca17-kube-api-access-dhm72\") pod \"nova-api-0\" (UID: \"361b4085-e6a8-45c8-b172-fe0fc321ca17\") " pod="openstack/nova-api-0" Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.650110 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/361b4085-e6a8-45c8-b172-fe0fc321ca17-logs\") pod \"nova-api-0\" (UID: \"361b4085-e6a8-45c8-b172-fe0fc321ca17\") " pod="openstack/nova-api-0" Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.650519 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/361b4085-e6a8-45c8-b172-fe0fc321ca17-config-data\") pod \"nova-api-0\" (UID: \"361b4085-e6a8-45c8-b172-fe0fc321ca17\") " pod="openstack/nova-api-0" Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.650664 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/361b4085-e6a8-45c8-b172-fe0fc321ca17-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"361b4085-e6a8-45c8-b172-fe0fc321ca17\") " pod="openstack/nova-api-0" Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.650802 4791 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/361b4085-e6a8-45c8-b172-fe0fc321ca17-internal-tls-certs\") pod \"nova-api-0\" (UID: \"361b4085-e6a8-45c8-b172-fe0fc321ca17\") " pod="openstack/nova-api-0" Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.753174 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/361b4085-e6a8-45c8-b172-fe0fc321ca17-public-tls-certs\") pod \"nova-api-0\" (UID: \"361b4085-e6a8-45c8-b172-fe0fc321ca17\") " pod="openstack/nova-api-0" Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.753266 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dhm72\" (UniqueName: \"kubernetes.io/projected/361b4085-e6a8-45c8-b172-fe0fc321ca17-kube-api-access-dhm72\") pod \"nova-api-0\" (UID: \"361b4085-e6a8-45c8-b172-fe0fc321ca17\") " pod="openstack/nova-api-0" Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.753302 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/361b4085-e6a8-45c8-b172-fe0fc321ca17-logs\") pod \"nova-api-0\" (UID: \"361b4085-e6a8-45c8-b172-fe0fc321ca17\") " pod="openstack/nova-api-0" Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.753379 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/361b4085-e6a8-45c8-b172-fe0fc321ca17-config-data\") pod \"nova-api-0\" (UID: \"361b4085-e6a8-45c8-b172-fe0fc321ca17\") " pod="openstack/nova-api-0" Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.753408 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/361b4085-e6a8-45c8-b172-fe0fc321ca17-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"361b4085-e6a8-45c8-b172-fe0fc321ca17\") " pod="openstack/nova-api-0" Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.753445 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/361b4085-e6a8-45c8-b172-fe0fc321ca17-internal-tls-certs\") pod \"nova-api-0\" (UID: \"361b4085-e6a8-45c8-b172-fe0fc321ca17\") " pod="openstack/nova-api-0" Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.754404 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/361b4085-e6a8-45c8-b172-fe0fc321ca17-logs\") pod \"nova-api-0\" (UID: \"361b4085-e6a8-45c8-b172-fe0fc321ca17\") " pod="openstack/nova-api-0" Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.759933 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/361b4085-e6a8-45c8-b172-fe0fc321ca17-public-tls-certs\") pod \"nova-api-0\" (UID: \"361b4085-e6a8-45c8-b172-fe0fc321ca17\") " pod="openstack/nova-api-0" Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.759995 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/361b4085-e6a8-45c8-b172-fe0fc321ca17-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"361b4085-e6a8-45c8-b172-fe0fc321ca17\") " pod="openstack/nova-api-0" Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.760163 4791 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/361b4085-e6a8-45c8-b172-fe0fc321ca17-internal-tls-certs\") pod \"nova-api-0\" (UID: \"361b4085-e6a8-45c8-b172-fe0fc321ca17\") " pod="openstack/nova-api-0" Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.761406 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/361b4085-e6a8-45c8-b172-fe0fc321ca17-config-data\") pod \"nova-api-0\" (UID: \"361b4085-e6a8-45c8-b172-fe0fc321ca17\") " pod="openstack/nova-api-0" Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.774928 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dhm72\" (UniqueName: \"kubernetes.io/projected/361b4085-e6a8-45c8-b172-fe0fc321ca17-kube-api-access-dhm72\") pod \"nova-api-0\" (UID: \"361b4085-e6a8-45c8-b172-fe0fc321ca17\") " pod="openstack/nova-api-0" Dec 08 21:45:36 crc kubenswrapper[4791]: I1208 21:45:36.861011 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 08 21:45:37 crc kubenswrapper[4791]: I1208 21:45:37.373010 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 08 21:45:37 crc kubenswrapper[4791]: I1208 21:45:37.486395 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"361b4085-e6a8-45c8-b172-fe0fc321ca17","Type":"ContainerStarted","Data":"45058b4b6dd9d772ff6ca2cf5781cc2e8ca84bfab71a812f8e0e77a2699740de"} Dec 08 21:45:37 crc kubenswrapper[4791]: I1208 21:45:37.616087 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d11f729-775d-4a25-8eb4-27757ae78469" path="/var/lib/kubelet/pods/3d11f729-775d-4a25-8eb4-27757ae78469/volumes" Dec 08 21:45:38 crc kubenswrapper[4791]: I1208 21:45:38.371974 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:45:38 crc kubenswrapper[4791]: I1208 21:45:38.397989 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:45:38 crc kubenswrapper[4791]: I1208 21:45:38.499796 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"361b4085-e6a8-45c8-b172-fe0fc321ca17","Type":"ContainerStarted","Data":"376c8a519bb2097c2d2d3f063fbab284a521d60521c6c5d3b3c78557cfa06a00"} Dec 08 21:45:38 crc kubenswrapper[4791]: I1208 21:45:38.499910 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"361b4085-e6a8-45c8-b172-fe0fc321ca17","Type":"ContainerStarted","Data":"69cffc6c29d3db8e3943d6e1a570667a1a4a48654124d32fcb8a8ca693ef02cb"} Dec 08 21:45:38 crc kubenswrapper[4791]: I1208 21:45:38.518244 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:45:38 crc kubenswrapper[4791]: I1208 21:45:38.524141 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.524112434 podStartE2EDuration="2.524112434s" podCreationTimestamp="2025-12-08 21:45:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:45:38.518118486 +0000 UTC m=+1615.216876831" watchObservedRunningTime="2025-12-08 21:45:38.524112434 +0000 UTC m=+1615.222870809" Dec 08 21:45:38 crc kubenswrapper[4791]: I1208 21:45:38.703528 4791 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-ss2d8"] Dec 08 21:45:38 crc kubenswrapper[4791]: I1208 21:45:38.705693 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-ss2d8" Dec 08 21:45:38 crc kubenswrapper[4791]: I1208 21:45:38.708168 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Dec 08 21:45:38 crc kubenswrapper[4791]: I1208 21:45:38.708168 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Dec 08 21:45:38 crc kubenswrapper[4791]: I1208 21:45:38.733340 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-ss2d8"] Dec 08 21:45:38 crc kubenswrapper[4791]: I1208 21:45:38.825310 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8006edc4-8fd5-474b-b98b-a70c34c93f33-scripts\") pod \"nova-cell1-cell-mapping-ss2d8\" (UID: \"8006edc4-8fd5-474b-b98b-a70c34c93f33\") " pod="openstack/nova-cell1-cell-mapping-ss2d8" Dec 08 21:45:38 crc kubenswrapper[4791]: I1208 21:45:38.825445 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8006edc4-8fd5-474b-b98b-a70c34c93f33-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-ss2d8\" (UID: \"8006edc4-8fd5-474b-b98b-a70c34c93f33\") " pod="openstack/nova-cell1-cell-mapping-ss2d8" Dec 08 21:45:38 crc kubenswrapper[4791]: I1208 21:45:38.825495 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dx4kw\" (UniqueName: \"kubernetes.io/projected/8006edc4-8fd5-474b-b98b-a70c34c93f33-kube-api-access-dx4kw\") pod \"nova-cell1-cell-mapping-ss2d8\" (UID: \"8006edc4-8fd5-474b-b98b-a70c34c93f33\") " pod="openstack/nova-cell1-cell-mapping-ss2d8" Dec 08 21:45:38 crc kubenswrapper[4791]: I1208 21:45:38.825589 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8006edc4-8fd5-474b-b98b-a70c34c93f33-config-data\") pod \"nova-cell1-cell-mapping-ss2d8\" (UID: \"8006edc4-8fd5-474b-b98b-a70c34c93f33\") " pod="openstack/nova-cell1-cell-mapping-ss2d8" Dec 08 21:45:38 crc kubenswrapper[4791]: I1208 21:45:38.928099 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8006edc4-8fd5-474b-b98b-a70c34c93f33-scripts\") pod \"nova-cell1-cell-mapping-ss2d8\" (UID: \"8006edc4-8fd5-474b-b98b-a70c34c93f33\") " pod="openstack/nova-cell1-cell-mapping-ss2d8" Dec 08 21:45:38 crc kubenswrapper[4791]: I1208 21:45:38.928232 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8006edc4-8fd5-474b-b98b-a70c34c93f33-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-ss2d8\" (UID: \"8006edc4-8fd5-474b-b98b-a70c34c93f33\") " pod="openstack/nova-cell1-cell-mapping-ss2d8" Dec 08 21:45:38 crc kubenswrapper[4791]: I1208 21:45:38.928275 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dx4kw\" (UniqueName: \"kubernetes.io/projected/8006edc4-8fd5-474b-b98b-a70c34c93f33-kube-api-access-dx4kw\") pod \"nova-cell1-cell-mapping-ss2d8\" (UID: \"8006edc4-8fd5-474b-b98b-a70c34c93f33\") " 
pod="openstack/nova-cell1-cell-mapping-ss2d8" Dec 08 21:45:38 crc kubenswrapper[4791]: I1208 21:45:38.928332 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8006edc4-8fd5-474b-b98b-a70c34c93f33-config-data\") pod \"nova-cell1-cell-mapping-ss2d8\" (UID: \"8006edc4-8fd5-474b-b98b-a70c34c93f33\") " pod="openstack/nova-cell1-cell-mapping-ss2d8" Dec 08 21:45:38 crc kubenswrapper[4791]: I1208 21:45:38.934963 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8006edc4-8fd5-474b-b98b-a70c34c93f33-config-data\") pod \"nova-cell1-cell-mapping-ss2d8\" (UID: \"8006edc4-8fd5-474b-b98b-a70c34c93f33\") " pod="openstack/nova-cell1-cell-mapping-ss2d8" Dec 08 21:45:38 crc kubenswrapper[4791]: I1208 21:45:38.936296 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8006edc4-8fd5-474b-b98b-a70c34c93f33-scripts\") pod \"nova-cell1-cell-mapping-ss2d8\" (UID: \"8006edc4-8fd5-474b-b98b-a70c34c93f33\") " pod="openstack/nova-cell1-cell-mapping-ss2d8" Dec 08 21:45:38 crc kubenswrapper[4791]: I1208 21:45:38.938335 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8006edc4-8fd5-474b-b98b-a70c34c93f33-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-ss2d8\" (UID: \"8006edc4-8fd5-474b-b98b-a70c34c93f33\") " pod="openstack/nova-cell1-cell-mapping-ss2d8" Dec 08 21:45:38 crc kubenswrapper[4791]: I1208 21:45:38.947430 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dx4kw\" (UniqueName: \"kubernetes.io/projected/8006edc4-8fd5-474b-b98b-a70c34c93f33-kube-api-access-dx4kw\") pod \"nova-cell1-cell-mapping-ss2d8\" (UID: \"8006edc4-8fd5-474b-b98b-a70c34c93f33\") " pod="openstack/nova-cell1-cell-mapping-ss2d8" Dec 08 21:45:39 crc kubenswrapper[4791]: I1208 21:45:39.027701 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-ss2d8" Dec 08 21:45:40 crc kubenswrapper[4791]: I1208 21:45:40.106087 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-f84f9ccf-tdxxg" Dec 08 21:45:40 crc kubenswrapper[4791]: I1208 21:45:40.180059 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-568d7fd7cf-6nlb8"] Dec 08 21:45:40 crc kubenswrapper[4791]: I1208 21:45:40.180562 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-568d7fd7cf-6nlb8" podUID="2af01a9d-314c-4443-bd34-ba54d4d5a3fd" containerName="dnsmasq-dns" containerID="cri-o://48914afeba64de2d059d0887ff0a37e0596c8341a2eb0e98f3a2b07176164fd1" gracePeriod=10 Dec 08 21:45:40 crc kubenswrapper[4791]: I1208 21:45:40.389163 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-ss2d8"] Dec 08 21:45:40 crc kubenswrapper[4791]: I1208 21:45:40.582544 4791 generic.go:334] "Generic (PLEG): container finished" podID="2af01a9d-314c-4443-bd34-ba54d4d5a3fd" containerID="48914afeba64de2d059d0887ff0a37e0596c8341a2eb0e98f3a2b07176164fd1" exitCode=0 Dec 08 21:45:40 crc kubenswrapper[4791]: I1208 21:45:40.582622 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-568d7fd7cf-6nlb8" event={"ID":"2af01a9d-314c-4443-bd34-ba54d4d5a3fd","Type":"ContainerDied","Data":"48914afeba64de2d059d0887ff0a37e0596c8341a2eb0e98f3a2b07176164fd1"} Dec 08 21:45:40 crc kubenswrapper[4791]: I1208 21:45:40.590515 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-ss2d8" event={"ID":"8006edc4-8fd5-474b-b98b-a70c34c93f33","Type":"ContainerStarted","Data":"fd127a327244bbd8952800433a96d063d418b31576d05e30fe9dcad1f7650fc5"} Dec 08 21:45:40 crc kubenswrapper[4791]: I1208 21:45:40.726140 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-568d7fd7cf-6nlb8" Dec 08 21:45:40 crc kubenswrapper[4791]: I1208 21:45:40.790386 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2af01a9d-314c-4443-bd34-ba54d4d5a3fd-dns-swift-storage-0\") pod \"2af01a9d-314c-4443-bd34-ba54d4d5a3fd\" (UID: \"2af01a9d-314c-4443-bd34-ba54d4d5a3fd\") " Dec 08 21:45:40 crc kubenswrapper[4791]: I1208 21:45:40.790436 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7l4m8\" (UniqueName: \"kubernetes.io/projected/2af01a9d-314c-4443-bd34-ba54d4d5a3fd-kube-api-access-7l4m8\") pod \"2af01a9d-314c-4443-bd34-ba54d4d5a3fd\" (UID: \"2af01a9d-314c-4443-bd34-ba54d4d5a3fd\") " Dec 08 21:45:40 crc kubenswrapper[4791]: I1208 21:45:40.790538 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2af01a9d-314c-4443-bd34-ba54d4d5a3fd-ovsdbserver-nb\") pod \"2af01a9d-314c-4443-bd34-ba54d4d5a3fd\" (UID: \"2af01a9d-314c-4443-bd34-ba54d4d5a3fd\") " Dec 08 21:45:40 crc kubenswrapper[4791]: I1208 21:45:40.790567 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2af01a9d-314c-4443-bd34-ba54d4d5a3fd-dns-svc\") pod \"2af01a9d-314c-4443-bd34-ba54d4d5a3fd\" (UID: \"2af01a9d-314c-4443-bd34-ba54d4d5a3fd\") " Dec 08 21:45:40 crc kubenswrapper[4791]: I1208 21:45:40.790597 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2af01a9d-314c-4443-bd34-ba54d4d5a3fd-ovsdbserver-sb\") pod \"2af01a9d-314c-4443-bd34-ba54d4d5a3fd\" (UID: \"2af01a9d-314c-4443-bd34-ba54d4d5a3fd\") " Dec 08 21:45:40 crc kubenswrapper[4791]: I1208 21:45:40.790680 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2af01a9d-314c-4443-bd34-ba54d4d5a3fd-config\") pod \"2af01a9d-314c-4443-bd34-ba54d4d5a3fd\" (UID: \"2af01a9d-314c-4443-bd34-ba54d4d5a3fd\") " Dec 08 21:45:40 crc kubenswrapper[4791]: I1208 21:45:40.816444 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2af01a9d-314c-4443-bd34-ba54d4d5a3fd-kube-api-access-7l4m8" (OuterVolumeSpecName: "kube-api-access-7l4m8") pod "2af01a9d-314c-4443-bd34-ba54d4d5a3fd" (UID: "2af01a9d-314c-4443-bd34-ba54d4d5a3fd"). InnerVolumeSpecName "kube-api-access-7l4m8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:45:40 crc kubenswrapper[4791]: I1208 21:45:40.855166 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2af01a9d-314c-4443-bd34-ba54d4d5a3fd-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2af01a9d-314c-4443-bd34-ba54d4d5a3fd" (UID: "2af01a9d-314c-4443-bd34-ba54d4d5a3fd"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:45:40 crc kubenswrapper[4791]: I1208 21:45:40.855158 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2af01a9d-314c-4443-bd34-ba54d4d5a3fd-config" (OuterVolumeSpecName: "config") pod "2af01a9d-314c-4443-bd34-ba54d4d5a3fd" (UID: "2af01a9d-314c-4443-bd34-ba54d4d5a3fd"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:45:40 crc kubenswrapper[4791]: I1208 21:45:40.858914 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2af01a9d-314c-4443-bd34-ba54d4d5a3fd-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "2af01a9d-314c-4443-bd34-ba54d4d5a3fd" (UID: "2af01a9d-314c-4443-bd34-ba54d4d5a3fd"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:45:40 crc kubenswrapper[4791]: I1208 21:45:40.865113 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2af01a9d-314c-4443-bd34-ba54d4d5a3fd-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2af01a9d-314c-4443-bd34-ba54d4d5a3fd" (UID: "2af01a9d-314c-4443-bd34-ba54d4d5a3fd"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:45:40 crc kubenswrapper[4791]: I1208 21:45:40.867214 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2af01a9d-314c-4443-bd34-ba54d4d5a3fd-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "2af01a9d-314c-4443-bd34-ba54d4d5a3fd" (UID: "2af01a9d-314c-4443-bd34-ba54d4d5a3fd"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:45:40 crc kubenswrapper[4791]: I1208 21:45:40.893438 4791 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2af01a9d-314c-4443-bd34-ba54d4d5a3fd-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:40 crc kubenswrapper[4791]: I1208 21:45:40.893473 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7l4m8\" (UniqueName: \"kubernetes.io/projected/2af01a9d-314c-4443-bd34-ba54d4d5a3fd-kube-api-access-7l4m8\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:40 crc kubenswrapper[4791]: I1208 21:45:40.893483 4791 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2af01a9d-314c-4443-bd34-ba54d4d5a3fd-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:40 crc kubenswrapper[4791]: I1208 21:45:40.893493 4791 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2af01a9d-314c-4443-bd34-ba54d4d5a3fd-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:40 crc kubenswrapper[4791]: I1208 21:45:40.893503 4791 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2af01a9d-314c-4443-bd34-ba54d4d5a3fd-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:40 crc kubenswrapper[4791]: I1208 21:45:40.893511 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2af01a9d-314c-4443-bd34-ba54d4d5a3fd-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:41 crc kubenswrapper[4791]: I1208 21:45:41.605559 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-568d7fd7cf-6nlb8" Dec 08 21:45:41 crc kubenswrapper[4791]: I1208 21:45:41.620638 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-568d7fd7cf-6nlb8" event={"ID":"2af01a9d-314c-4443-bd34-ba54d4d5a3fd","Type":"ContainerDied","Data":"82dd26d2085332d1bac34713fa42fdc9c9771e4b873dcd9e7bc5dcf74989cbe6"} Dec 08 21:45:41 crc kubenswrapper[4791]: I1208 21:45:41.620692 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-ss2d8" event={"ID":"8006edc4-8fd5-474b-b98b-a70c34c93f33","Type":"ContainerStarted","Data":"8a5158d9484db47869bc4bb710b7fd477fef7abfb938ba54a5ce3368f10057e2"} Dec 08 21:45:41 crc kubenswrapper[4791]: I1208 21:45:41.620878 4791 scope.go:117] "RemoveContainer" containerID="48914afeba64de2d059d0887ff0a37e0596c8341a2eb0e98f3a2b07176164fd1" Dec 08 21:45:41 crc kubenswrapper[4791]: I1208 21:45:41.646918 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-ss2d8" podStartSLOduration=3.646897065 podStartE2EDuration="3.646897065s" podCreationTimestamp="2025-12-08 21:45:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:45:41.63254116 +0000 UTC m=+1618.331299525" watchObservedRunningTime="2025-12-08 21:45:41.646897065 +0000 UTC m=+1618.345655410" Dec 08 21:45:41 crc kubenswrapper[4791]: I1208 21:45:41.652108 4791 scope.go:117] "RemoveContainer" containerID="e1989dbd887675fa9a730372a0e3a94f045cd664ce7d4b06bf600bee73bd3e60" Dec 08 21:45:41 crc kubenswrapper[4791]: I1208 21:45:41.669433 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-568d7fd7cf-6nlb8"] Dec 08 21:45:41 crc kubenswrapper[4791]: I1208 21:45:41.680495 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-568d7fd7cf-6nlb8"] Dec 08 21:45:43 crc kubenswrapper[4791]: I1208 21:45:43.612094 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2af01a9d-314c-4443-bd34-ba54d4d5a3fd" path="/var/lib/kubelet/pods/2af01a9d-314c-4443-bd34-ba54d4d5a3fd/volumes" Dec 08 21:45:45 crc kubenswrapper[4791]: I1208 21:45:45.665444 4791 generic.go:334] "Generic (PLEG): container finished" podID="8006edc4-8fd5-474b-b98b-a70c34c93f33" containerID="8a5158d9484db47869bc4bb710b7fd477fef7abfb938ba54a5ce3368f10057e2" exitCode=0 Dec 08 21:45:45 crc kubenswrapper[4791]: I1208 21:45:45.665515 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-ss2d8" event={"ID":"8006edc4-8fd5-474b-b98b-a70c34c93f33","Type":"ContainerDied","Data":"8a5158d9484db47869bc4bb710b7fd477fef7abfb938ba54a5ce3368f10057e2"} Dec 08 21:45:46 crc kubenswrapper[4791]: I1208 21:45:46.863653 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 08 21:45:46 crc kubenswrapper[4791]: I1208 21:45:46.863922 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 08 21:45:47 crc kubenswrapper[4791]: I1208 21:45:47.145667 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-ss2d8" Dec 08 21:45:47 crc kubenswrapper[4791]: I1208 21:45:47.244605 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8006edc4-8fd5-474b-b98b-a70c34c93f33-scripts\") pod \"8006edc4-8fd5-474b-b98b-a70c34c93f33\" (UID: \"8006edc4-8fd5-474b-b98b-a70c34c93f33\") " Dec 08 21:45:47 crc kubenswrapper[4791]: I1208 21:45:47.245804 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dx4kw\" (UniqueName: \"kubernetes.io/projected/8006edc4-8fd5-474b-b98b-a70c34c93f33-kube-api-access-dx4kw\") pod \"8006edc4-8fd5-474b-b98b-a70c34c93f33\" (UID: \"8006edc4-8fd5-474b-b98b-a70c34c93f33\") " Dec 08 21:45:47 crc kubenswrapper[4791]: I1208 21:45:47.245977 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8006edc4-8fd5-474b-b98b-a70c34c93f33-config-data\") pod \"8006edc4-8fd5-474b-b98b-a70c34c93f33\" (UID: \"8006edc4-8fd5-474b-b98b-a70c34c93f33\") " Dec 08 21:45:47 crc kubenswrapper[4791]: I1208 21:45:47.246399 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8006edc4-8fd5-474b-b98b-a70c34c93f33-combined-ca-bundle\") pod \"8006edc4-8fd5-474b-b98b-a70c34c93f33\" (UID: \"8006edc4-8fd5-474b-b98b-a70c34c93f33\") " Dec 08 21:45:47 crc kubenswrapper[4791]: I1208 21:45:47.251581 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8006edc4-8fd5-474b-b98b-a70c34c93f33-kube-api-access-dx4kw" (OuterVolumeSpecName: "kube-api-access-dx4kw") pod "8006edc4-8fd5-474b-b98b-a70c34c93f33" (UID: "8006edc4-8fd5-474b-b98b-a70c34c93f33"). InnerVolumeSpecName "kube-api-access-dx4kw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:45:47 crc kubenswrapper[4791]: I1208 21:45:47.252535 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8006edc4-8fd5-474b-b98b-a70c34c93f33-scripts" (OuterVolumeSpecName: "scripts") pod "8006edc4-8fd5-474b-b98b-a70c34c93f33" (UID: "8006edc4-8fd5-474b-b98b-a70c34c93f33"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:45:47 crc kubenswrapper[4791]: I1208 21:45:47.280764 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8006edc4-8fd5-474b-b98b-a70c34c93f33-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8006edc4-8fd5-474b-b98b-a70c34c93f33" (UID: "8006edc4-8fd5-474b-b98b-a70c34c93f33"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:45:47 crc kubenswrapper[4791]: I1208 21:45:47.283236 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8006edc4-8fd5-474b-b98b-a70c34c93f33-config-data" (OuterVolumeSpecName: "config-data") pod "8006edc4-8fd5-474b-b98b-a70c34c93f33" (UID: "8006edc4-8fd5-474b-b98b-a70c34c93f33"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:45:47 crc kubenswrapper[4791]: I1208 21:45:47.350154 4791 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8006edc4-8fd5-474b-b98b-a70c34c93f33-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:47 crc kubenswrapper[4791]: I1208 21:45:47.350395 4791 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8006edc4-8fd5-474b-b98b-a70c34c93f33-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:47 crc kubenswrapper[4791]: I1208 21:45:47.350472 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dx4kw\" (UniqueName: \"kubernetes.io/projected/8006edc4-8fd5-474b-b98b-a70c34c93f33-kube-api-access-dx4kw\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:47 crc kubenswrapper[4791]: I1208 21:45:47.350548 4791 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8006edc4-8fd5-474b-b98b-a70c34c93f33-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:47 crc kubenswrapper[4791]: I1208 21:45:47.598473 4791 scope.go:117] "RemoveContainer" containerID="378e3a5a4f2c0073c5af7e3a66a8b9357a6d3b10e58f0a7f472dd39ed5f18e43" Dec 08 21:45:47 crc kubenswrapper[4791]: E1208 21:45:47.598917 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 21:45:47 crc kubenswrapper[4791]: I1208 21:45:47.688220 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-ss2d8" event={"ID":"8006edc4-8fd5-474b-b98b-a70c34c93f33","Type":"ContainerDied","Data":"fd127a327244bbd8952800433a96d063d418b31576d05e30fe9dcad1f7650fc5"} Dec 08 21:45:47 crc kubenswrapper[4791]: I1208 21:45:47.688273 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fd127a327244bbd8952800433a96d063d418b31576d05e30fe9dcad1f7650fc5" Dec 08 21:45:47 crc kubenswrapper[4791]: I1208 21:45:47.688282 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-ss2d8" Dec 08 21:45:47 crc kubenswrapper[4791]: I1208 21:45:47.882972 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="361b4085-e6a8-45c8-b172-fe0fc321ca17" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.230:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 08 21:45:47 crc kubenswrapper[4791]: I1208 21:45:47.883328 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="361b4085-e6a8-45c8-b172-fe0fc321ca17" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.230:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 08 21:45:47 crc kubenswrapper[4791]: I1208 21:45:47.921876 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 08 21:45:47 crc kubenswrapper[4791]: I1208 21:45:47.922404 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="361b4085-e6a8-45c8-b172-fe0fc321ca17" containerName="nova-api-log" containerID="cri-o://69cffc6c29d3db8e3943d6e1a570667a1a4a48654124d32fcb8a8ca693ef02cb" gracePeriod=30 Dec 08 21:45:47 crc kubenswrapper[4791]: I1208 21:45:47.922727 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="361b4085-e6a8-45c8-b172-fe0fc321ca17" containerName="nova-api-api" containerID="cri-o://376c8a519bb2097c2d2d3f063fbab284a521d60521c6c5d3b3c78557cfa06a00" gracePeriod=30 Dec 08 21:45:47 crc kubenswrapper[4791]: I1208 21:45:47.960873 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 08 21:45:47 crc kubenswrapper[4791]: I1208 21:45:47.961933 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93" containerName="nova-scheduler-scheduler" containerID="cri-o://e71c442fd99e3bb56ebf7dd62f24739682e37beec823fed946303692aac2a294" gracePeriod=30 Dec 08 21:45:47 crc kubenswrapper[4791]: I1208 21:45:47.987257 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 08 21:45:47 crc kubenswrapper[4791]: I1208 21:45:47.987567 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="4aecb1a7-b3e4-40b9-baa9-3daa133d63f3" containerName="nova-metadata-log" containerID="cri-o://cc7c452e6d527ca1eab3093793a72d2f58010224b1c02b690fa2ab1c6c95a5b7" gracePeriod=30 Dec 08 21:45:47 crc kubenswrapper[4791]: I1208 21:45:47.988254 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="4aecb1a7-b3e4-40b9-baa9-3daa133d63f3" containerName="nova-metadata-metadata" containerID="cri-o://be8501a1cb4c3716fc13390963b59d9253b40d62a962860a2ddf348781872732" gracePeriod=30 Dec 08 21:45:48 crc kubenswrapper[4791]: I1208 21:45:48.702555 4791 generic.go:334] "Generic (PLEG): container finished" podID="361b4085-e6a8-45c8-b172-fe0fc321ca17" containerID="69cffc6c29d3db8e3943d6e1a570667a1a4a48654124d32fcb8a8ca693ef02cb" exitCode=143 Dec 08 21:45:48 crc kubenswrapper[4791]: I1208 21:45:48.702776 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"361b4085-e6a8-45c8-b172-fe0fc321ca17","Type":"ContainerDied","Data":"69cffc6c29d3db8e3943d6e1a570667a1a4a48654124d32fcb8a8ca693ef02cb"} Dec 
08 21:45:48 crc kubenswrapper[4791]: I1208 21:45:48.705326 4791 generic.go:334] "Generic (PLEG): container finished" podID="4aecb1a7-b3e4-40b9-baa9-3daa133d63f3" containerID="cc7c452e6d527ca1eab3093793a72d2f58010224b1c02b690fa2ab1c6c95a5b7" exitCode=143 Dec 08 21:45:48 crc kubenswrapper[4791]: I1208 21:45:48.705361 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4aecb1a7-b3e4-40b9-baa9-3daa133d63f3","Type":"ContainerDied","Data":"cc7c452e6d527ca1eab3093793a72d2f58010224b1c02b690fa2ab1c6c95a5b7"} Dec 08 21:45:50 crc kubenswrapper[4791]: I1208 21:45:50.319609 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 08 21:45:50 crc kubenswrapper[4791]: I1208 21:45:50.518068 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93-combined-ca-bundle\") pod \"2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93\" (UID: \"2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93\") " Dec 08 21:45:50 crc kubenswrapper[4791]: I1208 21:45:50.518118 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93-config-data\") pod \"2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93\" (UID: \"2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93\") " Dec 08 21:45:50 crc kubenswrapper[4791]: I1208 21:45:50.518198 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qkbtf\" (UniqueName: \"kubernetes.io/projected/2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93-kube-api-access-qkbtf\") pod \"2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93\" (UID: \"2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93\") " Dec 08 21:45:50 crc kubenswrapper[4791]: I1208 21:45:50.524090 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93-kube-api-access-qkbtf" (OuterVolumeSpecName: "kube-api-access-qkbtf") pod "2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93" (UID: "2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93"). InnerVolumeSpecName "kube-api-access-qkbtf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:45:50 crc kubenswrapper[4791]: I1208 21:45:50.559912 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93" (UID: "2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:45:50 crc kubenswrapper[4791]: I1208 21:45:50.566943 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93-config-data" (OuterVolumeSpecName: "config-data") pod "2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93" (UID: "2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:45:50 crc kubenswrapper[4791]: I1208 21:45:50.622478 4791 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:50 crc kubenswrapper[4791]: I1208 21:45:50.624314 4791 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:50 crc kubenswrapper[4791]: I1208 21:45:50.624535 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qkbtf\" (UniqueName: \"kubernetes.io/projected/2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93-kube-api-access-qkbtf\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:50 crc kubenswrapper[4791]: I1208 21:45:50.731080 4791 generic.go:334] "Generic (PLEG): container finished" podID="2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93" containerID="e71c442fd99e3bb56ebf7dd62f24739682e37beec823fed946303692aac2a294" exitCode=0 Dec 08 21:45:50 crc kubenswrapper[4791]: I1208 21:45:50.731130 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93","Type":"ContainerDied","Data":"e71c442fd99e3bb56ebf7dd62f24739682e37beec823fed946303692aac2a294"} Dec 08 21:45:50 crc kubenswrapper[4791]: I1208 21:45:50.731158 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93","Type":"ContainerDied","Data":"3b7773fa49087b2e782afc5bbb5a6428c2e852436cafd9f325fe817e74c95917"} Dec 08 21:45:50 crc kubenswrapper[4791]: I1208 21:45:50.731176 4791 scope.go:117] "RemoveContainer" containerID="e71c442fd99e3bb56ebf7dd62f24739682e37beec823fed946303692aac2a294" Dec 08 21:45:50 crc kubenswrapper[4791]: I1208 21:45:50.731313 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 08 21:45:50 crc kubenswrapper[4791]: I1208 21:45:50.764315 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 08 21:45:50 crc kubenswrapper[4791]: I1208 21:45:50.765496 4791 scope.go:117] "RemoveContainer" containerID="e71c442fd99e3bb56ebf7dd62f24739682e37beec823fed946303692aac2a294" Dec 08 21:45:50 crc kubenswrapper[4791]: E1208 21:45:50.766342 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e71c442fd99e3bb56ebf7dd62f24739682e37beec823fed946303692aac2a294\": container with ID starting with e71c442fd99e3bb56ebf7dd62f24739682e37beec823fed946303692aac2a294 not found: ID does not exist" containerID="e71c442fd99e3bb56ebf7dd62f24739682e37beec823fed946303692aac2a294" Dec 08 21:45:50 crc kubenswrapper[4791]: I1208 21:45:50.766383 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e71c442fd99e3bb56ebf7dd62f24739682e37beec823fed946303692aac2a294"} err="failed to get container status \"e71c442fd99e3bb56ebf7dd62f24739682e37beec823fed946303692aac2a294\": rpc error: code = NotFound desc = could not find container \"e71c442fd99e3bb56ebf7dd62f24739682e37beec823fed946303692aac2a294\": container with ID starting with e71c442fd99e3bb56ebf7dd62f24739682e37beec823fed946303692aac2a294 not found: ID does not exist" Dec 08 21:45:50 crc kubenswrapper[4791]: I1208 21:45:50.793887 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 08 21:45:50 crc kubenswrapper[4791]: I1208 21:45:50.819035 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 08 21:45:50 crc kubenswrapper[4791]: E1208 21:45:50.820923 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2af01a9d-314c-4443-bd34-ba54d4d5a3fd" containerName="dnsmasq-dns" Dec 08 21:45:50 crc kubenswrapper[4791]: I1208 21:45:50.820959 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="2af01a9d-314c-4443-bd34-ba54d4d5a3fd" containerName="dnsmasq-dns" Dec 08 21:45:50 crc kubenswrapper[4791]: E1208 21:45:50.821003 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8006edc4-8fd5-474b-b98b-a70c34c93f33" containerName="nova-manage" Dec 08 21:45:50 crc kubenswrapper[4791]: I1208 21:45:50.821015 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="8006edc4-8fd5-474b-b98b-a70c34c93f33" containerName="nova-manage" Dec 08 21:45:50 crc kubenswrapper[4791]: E1208 21:45:50.821048 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93" containerName="nova-scheduler-scheduler" Dec 08 21:45:50 crc kubenswrapper[4791]: I1208 21:45:50.821056 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93" containerName="nova-scheduler-scheduler" Dec 08 21:45:50 crc kubenswrapper[4791]: E1208 21:45:50.821107 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2af01a9d-314c-4443-bd34-ba54d4d5a3fd" containerName="init" Dec 08 21:45:50 crc kubenswrapper[4791]: I1208 21:45:50.821118 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="2af01a9d-314c-4443-bd34-ba54d4d5a3fd" containerName="init" Dec 08 21:45:50 crc kubenswrapper[4791]: I1208 21:45:50.825539 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="2af01a9d-314c-4443-bd34-ba54d4d5a3fd" containerName="dnsmasq-dns" Dec 08 21:45:50 crc 
kubenswrapper[4791]: I1208 21:45:50.825746 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93" containerName="nova-scheduler-scheduler" Dec 08 21:45:50 crc kubenswrapper[4791]: I1208 21:45:50.825775 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="8006edc4-8fd5-474b-b98b-a70c34c93f33" containerName="nova-manage" Dec 08 21:45:50 crc kubenswrapper[4791]: I1208 21:45:50.828610 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 08 21:45:50 crc kubenswrapper[4791]: I1208 21:45:50.834952 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 08 21:45:50 crc kubenswrapper[4791]: I1208 21:45:50.835132 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 08 21:45:50 crc kubenswrapper[4791]: I1208 21:45:50.930980 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cafa4036-e745-4f13-abd0-06e498ef4bcc-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"cafa4036-e745-4f13-abd0-06e498ef4bcc\") " pod="openstack/nova-scheduler-0" Dec 08 21:45:50 crc kubenswrapper[4791]: I1208 21:45:50.931250 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vj44b\" (UniqueName: \"kubernetes.io/projected/cafa4036-e745-4f13-abd0-06e498ef4bcc-kube-api-access-vj44b\") pod \"nova-scheduler-0\" (UID: \"cafa4036-e745-4f13-abd0-06e498ef4bcc\") " pod="openstack/nova-scheduler-0" Dec 08 21:45:50 crc kubenswrapper[4791]: I1208 21:45:50.932110 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cafa4036-e745-4f13-abd0-06e498ef4bcc-config-data\") pod \"nova-scheduler-0\" (UID: \"cafa4036-e745-4f13-abd0-06e498ef4bcc\") " pod="openstack/nova-scheduler-0" Dec 08 21:45:51 crc kubenswrapper[4791]: I1208 21:45:51.034557 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cafa4036-e745-4f13-abd0-06e498ef4bcc-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"cafa4036-e745-4f13-abd0-06e498ef4bcc\") " pod="openstack/nova-scheduler-0" Dec 08 21:45:51 crc kubenswrapper[4791]: I1208 21:45:51.034654 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vj44b\" (UniqueName: \"kubernetes.io/projected/cafa4036-e745-4f13-abd0-06e498ef4bcc-kube-api-access-vj44b\") pod \"nova-scheduler-0\" (UID: \"cafa4036-e745-4f13-abd0-06e498ef4bcc\") " pod="openstack/nova-scheduler-0" Dec 08 21:45:51 crc kubenswrapper[4791]: I1208 21:45:51.034813 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cafa4036-e745-4f13-abd0-06e498ef4bcc-config-data\") pod \"nova-scheduler-0\" (UID: \"cafa4036-e745-4f13-abd0-06e498ef4bcc\") " pod="openstack/nova-scheduler-0" Dec 08 21:45:51 crc kubenswrapper[4791]: I1208 21:45:51.041898 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cafa4036-e745-4f13-abd0-06e498ef4bcc-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"cafa4036-e745-4f13-abd0-06e498ef4bcc\") " pod="openstack/nova-scheduler-0" Dec 08 21:45:51 crc 
kubenswrapper[4791]: I1208 21:45:51.044232 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cafa4036-e745-4f13-abd0-06e498ef4bcc-config-data\") pod \"nova-scheduler-0\" (UID: \"cafa4036-e745-4f13-abd0-06e498ef4bcc\") " pod="openstack/nova-scheduler-0" Dec 08 21:45:51 crc kubenswrapper[4791]: I1208 21:45:51.051638 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vj44b\" (UniqueName: \"kubernetes.io/projected/cafa4036-e745-4f13-abd0-06e498ef4bcc-kube-api-access-vj44b\") pod \"nova-scheduler-0\" (UID: \"cafa4036-e745-4f13-abd0-06e498ef4bcc\") " pod="openstack/nova-scheduler-0" Dec 08 21:45:51 crc kubenswrapper[4791]: I1208 21:45:51.152219 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 08 21:45:51 crc kubenswrapper[4791]: I1208 21:45:51.223547 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="4aecb1a7-b3e4-40b9-baa9-3daa133d63f3" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.224:8775/\": read tcp 10.217.0.2:52698->10.217.0.224:8775: read: connection reset by peer" Dec 08 21:45:51 crc kubenswrapper[4791]: I1208 21:45:51.223639 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="4aecb1a7-b3e4-40b9-baa9-3daa133d63f3" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.224:8775/\": read tcp 10.217.0.2:52700->10.217.0.224:8775: read: connection reset by peer" Dec 08 21:45:51 crc kubenswrapper[4791]: I1208 21:45:51.624066 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93" path="/var/lib/kubelet/pods/2ea002f9-8dd7-4ca0-9e87-7724d7cf0b93/volumes" Dec 08 21:45:51 crc kubenswrapper[4791]: W1208 21:45:51.641085 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcafa4036_e745_4f13_abd0_06e498ef4bcc.slice/crio-4e3aeafdb51e6a9ac2087b26002287c19459a31f443c8e84d9e8a1277d559083 WatchSource:0}: Error finding container 4e3aeafdb51e6a9ac2087b26002287c19459a31f443c8e84d9e8a1277d559083: Status 404 returned error can't find the container with id 4e3aeafdb51e6a9ac2087b26002287c19459a31f443c8e84d9e8a1277d559083 Dec 08 21:45:51 crc kubenswrapper[4791]: I1208 21:45:51.642031 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 08 21:45:51 crc kubenswrapper[4791]: I1208 21:45:51.690625 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 08 21:45:51 crc kubenswrapper[4791]: I1208 21:45:51.745326 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"cafa4036-e745-4f13-abd0-06e498ef4bcc","Type":"ContainerStarted","Data":"4e3aeafdb51e6a9ac2087b26002287c19459a31f443c8e84d9e8a1277d559083"} Dec 08 21:45:51 crc kubenswrapper[4791]: I1208 21:45:51.750475 4791 generic.go:334] "Generic (PLEG): container finished" podID="4aecb1a7-b3e4-40b9-baa9-3daa133d63f3" containerID="be8501a1cb4c3716fc13390963b59d9253b40d62a962860a2ddf348781872732" exitCode=0 Dec 08 21:45:51 crc kubenswrapper[4791]: I1208 21:45:51.750539 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4aecb1a7-b3e4-40b9-baa9-3daa133d63f3","Type":"ContainerDied","Data":"be8501a1cb4c3716fc13390963b59d9253b40d62a962860a2ddf348781872732"} Dec 08 21:45:51 crc kubenswrapper[4791]: I1208 21:45:51.750579 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4aecb1a7-b3e4-40b9-baa9-3daa133d63f3","Type":"ContainerDied","Data":"d34fd8293cb97ed8854d871a84163f1fb9f308509523ac44cf5d3d965fe08f3f"} Dec 08 21:45:51 crc kubenswrapper[4791]: I1208 21:45:51.750606 4791 scope.go:117] "RemoveContainer" containerID="be8501a1cb4c3716fc13390963b59d9253b40d62a962860a2ddf348781872732" Dec 08 21:45:51 crc kubenswrapper[4791]: I1208 21:45:51.750831 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 08 21:45:51 crc kubenswrapper[4791]: I1208 21:45:51.794438 4791 scope.go:117] "RemoveContainer" containerID="cc7c452e6d527ca1eab3093793a72d2f58010224b1c02b690fa2ab1c6c95a5b7" Dec 08 21:45:51 crc kubenswrapper[4791]: I1208 21:45:51.821534 4791 scope.go:117] "RemoveContainer" containerID="be8501a1cb4c3716fc13390963b59d9253b40d62a962860a2ddf348781872732" Dec 08 21:45:51 crc kubenswrapper[4791]: E1208 21:45:51.822094 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"be8501a1cb4c3716fc13390963b59d9253b40d62a962860a2ddf348781872732\": container with ID starting with be8501a1cb4c3716fc13390963b59d9253b40d62a962860a2ddf348781872732 not found: ID does not exist" containerID="be8501a1cb4c3716fc13390963b59d9253b40d62a962860a2ddf348781872732" Dec 08 21:45:51 crc kubenswrapper[4791]: I1208 21:45:51.822143 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be8501a1cb4c3716fc13390963b59d9253b40d62a962860a2ddf348781872732"} err="failed to get container status \"be8501a1cb4c3716fc13390963b59d9253b40d62a962860a2ddf348781872732\": rpc error: code = NotFound desc = could not find container \"be8501a1cb4c3716fc13390963b59d9253b40d62a962860a2ddf348781872732\": container with ID starting with be8501a1cb4c3716fc13390963b59d9253b40d62a962860a2ddf348781872732 not found: ID does not exist" Dec 08 21:45:51 crc kubenswrapper[4791]: I1208 21:45:51.822177 4791 scope.go:117] "RemoveContainer" containerID="cc7c452e6d527ca1eab3093793a72d2f58010224b1c02b690fa2ab1c6c95a5b7" Dec 08 21:45:51 crc kubenswrapper[4791]: E1208 21:45:51.822500 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc7c452e6d527ca1eab3093793a72d2f58010224b1c02b690fa2ab1c6c95a5b7\": container with ID starting with cc7c452e6d527ca1eab3093793a72d2f58010224b1c02b690fa2ab1c6c95a5b7 not found: ID does not exist" 
containerID="cc7c452e6d527ca1eab3093793a72d2f58010224b1c02b690fa2ab1c6c95a5b7" Dec 08 21:45:51 crc kubenswrapper[4791]: I1208 21:45:51.822561 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc7c452e6d527ca1eab3093793a72d2f58010224b1c02b690fa2ab1c6c95a5b7"} err="failed to get container status \"cc7c452e6d527ca1eab3093793a72d2f58010224b1c02b690fa2ab1c6c95a5b7\": rpc error: code = NotFound desc = could not find container \"cc7c452e6d527ca1eab3093793a72d2f58010224b1c02b690fa2ab1c6c95a5b7\": container with ID starting with cc7c452e6d527ca1eab3093793a72d2f58010224b1c02b690fa2ab1c6c95a5b7 not found: ID does not exist" Dec 08 21:45:51 crc kubenswrapper[4791]: I1208 21:45:51.864476 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4aecb1a7-b3e4-40b9-baa9-3daa133d63f3-nova-metadata-tls-certs\") pod \"4aecb1a7-b3e4-40b9-baa9-3daa133d63f3\" (UID: \"4aecb1a7-b3e4-40b9-baa9-3daa133d63f3\") " Dec 08 21:45:51 crc kubenswrapper[4791]: I1208 21:45:51.864632 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4aecb1a7-b3e4-40b9-baa9-3daa133d63f3-logs\") pod \"4aecb1a7-b3e4-40b9-baa9-3daa133d63f3\" (UID: \"4aecb1a7-b3e4-40b9-baa9-3daa133d63f3\") " Dec 08 21:45:51 crc kubenswrapper[4791]: I1208 21:45:51.864864 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4aecb1a7-b3e4-40b9-baa9-3daa133d63f3-config-data\") pod \"4aecb1a7-b3e4-40b9-baa9-3daa133d63f3\" (UID: \"4aecb1a7-b3e4-40b9-baa9-3daa133d63f3\") " Dec 08 21:45:51 crc kubenswrapper[4791]: I1208 21:45:51.864906 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9v6hn\" (UniqueName: \"kubernetes.io/projected/4aecb1a7-b3e4-40b9-baa9-3daa133d63f3-kube-api-access-9v6hn\") pod \"4aecb1a7-b3e4-40b9-baa9-3daa133d63f3\" (UID: \"4aecb1a7-b3e4-40b9-baa9-3daa133d63f3\") " Dec 08 21:45:51 crc kubenswrapper[4791]: I1208 21:45:51.864929 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4aecb1a7-b3e4-40b9-baa9-3daa133d63f3-combined-ca-bundle\") pod \"4aecb1a7-b3e4-40b9-baa9-3daa133d63f3\" (UID: \"4aecb1a7-b3e4-40b9-baa9-3daa133d63f3\") " Dec 08 21:45:51 crc kubenswrapper[4791]: I1208 21:45:51.869387 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4aecb1a7-b3e4-40b9-baa9-3daa133d63f3-logs" (OuterVolumeSpecName: "logs") pod "4aecb1a7-b3e4-40b9-baa9-3daa133d63f3" (UID: "4aecb1a7-b3e4-40b9-baa9-3daa133d63f3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:45:51 crc kubenswrapper[4791]: I1208 21:45:51.869890 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4aecb1a7-b3e4-40b9-baa9-3daa133d63f3-kube-api-access-9v6hn" (OuterVolumeSpecName: "kube-api-access-9v6hn") pod "4aecb1a7-b3e4-40b9-baa9-3daa133d63f3" (UID: "4aecb1a7-b3e4-40b9-baa9-3daa133d63f3"). InnerVolumeSpecName "kube-api-access-9v6hn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:45:51 crc kubenswrapper[4791]: I1208 21:45:51.904000 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4aecb1a7-b3e4-40b9-baa9-3daa133d63f3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4aecb1a7-b3e4-40b9-baa9-3daa133d63f3" (UID: "4aecb1a7-b3e4-40b9-baa9-3daa133d63f3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:45:51 crc kubenswrapper[4791]: I1208 21:45:51.912964 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4aecb1a7-b3e4-40b9-baa9-3daa133d63f3-config-data" (OuterVolumeSpecName: "config-data") pod "4aecb1a7-b3e4-40b9-baa9-3daa133d63f3" (UID: "4aecb1a7-b3e4-40b9-baa9-3daa133d63f3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:45:51 crc kubenswrapper[4791]: I1208 21:45:51.937048 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4aecb1a7-b3e4-40b9-baa9-3daa133d63f3-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "4aecb1a7-b3e4-40b9-baa9-3daa133d63f3" (UID: "4aecb1a7-b3e4-40b9-baa9-3daa133d63f3"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:45:51 crc kubenswrapper[4791]: I1208 21:45:51.967815 4791 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4aecb1a7-b3e4-40b9-baa9-3daa133d63f3-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:51 crc kubenswrapper[4791]: I1208 21:45:51.967858 4791 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4aecb1a7-b3e4-40b9-baa9-3daa133d63f3-logs\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:51 crc kubenswrapper[4791]: I1208 21:45:51.967871 4791 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4aecb1a7-b3e4-40b9-baa9-3daa133d63f3-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:51 crc kubenswrapper[4791]: I1208 21:45:51.967883 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9v6hn\" (UniqueName: \"kubernetes.io/projected/4aecb1a7-b3e4-40b9-baa9-3daa133d63f3-kube-api-access-9v6hn\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:51 crc kubenswrapper[4791]: I1208 21:45:51.967896 4791 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4aecb1a7-b3e4-40b9-baa9-3daa133d63f3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:52 crc kubenswrapper[4791]: I1208 21:45:52.162377 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 08 21:45:52 crc kubenswrapper[4791]: I1208 21:45:52.177815 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 08 21:45:52 crc kubenswrapper[4791]: I1208 21:45:52.198434 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 08 21:45:52 crc kubenswrapper[4791]: E1208 21:45:52.199204 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4aecb1a7-b3e4-40b9-baa9-3daa133d63f3" containerName="nova-metadata-metadata" Dec 08 21:45:52 crc kubenswrapper[4791]: I1208 21:45:52.199231 4791 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="4aecb1a7-b3e4-40b9-baa9-3daa133d63f3" containerName="nova-metadata-metadata" Dec 08 21:45:52 crc kubenswrapper[4791]: E1208 21:45:52.199247 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4aecb1a7-b3e4-40b9-baa9-3daa133d63f3" containerName="nova-metadata-log" Dec 08 21:45:52 crc kubenswrapper[4791]: I1208 21:45:52.199255 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="4aecb1a7-b3e4-40b9-baa9-3daa133d63f3" containerName="nova-metadata-log" Dec 08 21:45:52 crc kubenswrapper[4791]: I1208 21:45:52.199532 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="4aecb1a7-b3e4-40b9-baa9-3daa133d63f3" containerName="nova-metadata-log" Dec 08 21:45:52 crc kubenswrapper[4791]: I1208 21:45:52.199594 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="4aecb1a7-b3e4-40b9-baa9-3daa133d63f3" containerName="nova-metadata-metadata" Dec 08 21:45:52 crc kubenswrapper[4791]: I1208 21:45:52.201481 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 08 21:45:52 crc kubenswrapper[4791]: I1208 21:45:52.209924 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 08 21:45:52 crc kubenswrapper[4791]: I1208 21:45:52.210396 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 08 21:45:52 crc kubenswrapper[4791]: I1208 21:45:52.228144 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 08 21:45:52 crc kubenswrapper[4791]: I1208 21:45:52.376935 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e85819c2-ae52-42b3-89d5-426970706586-config-data\") pod \"nova-metadata-0\" (UID: \"e85819c2-ae52-42b3-89d5-426970706586\") " pod="openstack/nova-metadata-0" Dec 08 21:45:52 crc kubenswrapper[4791]: I1208 21:45:52.377053 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/e85819c2-ae52-42b3-89d5-426970706586-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"e85819c2-ae52-42b3-89d5-426970706586\") " pod="openstack/nova-metadata-0" Dec 08 21:45:52 crc kubenswrapper[4791]: I1208 21:45:52.377098 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e85819c2-ae52-42b3-89d5-426970706586-logs\") pod \"nova-metadata-0\" (UID: \"e85819c2-ae52-42b3-89d5-426970706586\") " pod="openstack/nova-metadata-0" Dec 08 21:45:52 crc kubenswrapper[4791]: I1208 21:45:52.377127 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-szx8c\" (UniqueName: \"kubernetes.io/projected/e85819c2-ae52-42b3-89d5-426970706586-kube-api-access-szx8c\") pod \"nova-metadata-0\" (UID: \"e85819c2-ae52-42b3-89d5-426970706586\") " pod="openstack/nova-metadata-0" Dec 08 21:45:52 crc kubenswrapper[4791]: I1208 21:45:52.377260 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e85819c2-ae52-42b3-89d5-426970706586-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e85819c2-ae52-42b3-89d5-426970706586\") " pod="openstack/nova-metadata-0" Dec 08 21:45:52 crc kubenswrapper[4791]: I1208 21:45:52.479122 4791 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e85819c2-ae52-42b3-89d5-426970706586-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e85819c2-ae52-42b3-89d5-426970706586\") " pod="openstack/nova-metadata-0" Dec 08 21:45:52 crc kubenswrapper[4791]: I1208 21:45:52.479244 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e85819c2-ae52-42b3-89d5-426970706586-config-data\") pod \"nova-metadata-0\" (UID: \"e85819c2-ae52-42b3-89d5-426970706586\") " pod="openstack/nova-metadata-0" Dec 08 21:45:52 crc kubenswrapper[4791]: I1208 21:45:52.479332 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/e85819c2-ae52-42b3-89d5-426970706586-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"e85819c2-ae52-42b3-89d5-426970706586\") " pod="openstack/nova-metadata-0" Dec 08 21:45:52 crc kubenswrapper[4791]: I1208 21:45:52.479376 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e85819c2-ae52-42b3-89d5-426970706586-logs\") pod \"nova-metadata-0\" (UID: \"e85819c2-ae52-42b3-89d5-426970706586\") " pod="openstack/nova-metadata-0" Dec 08 21:45:52 crc kubenswrapper[4791]: I1208 21:45:52.479406 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-szx8c\" (UniqueName: \"kubernetes.io/projected/e85819c2-ae52-42b3-89d5-426970706586-kube-api-access-szx8c\") pod \"nova-metadata-0\" (UID: \"e85819c2-ae52-42b3-89d5-426970706586\") " pod="openstack/nova-metadata-0" Dec 08 21:45:52 crc kubenswrapper[4791]: I1208 21:45:52.480451 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e85819c2-ae52-42b3-89d5-426970706586-logs\") pod \"nova-metadata-0\" (UID: \"e85819c2-ae52-42b3-89d5-426970706586\") " pod="openstack/nova-metadata-0" Dec 08 21:45:52 crc kubenswrapper[4791]: I1208 21:45:52.485115 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e85819c2-ae52-42b3-89d5-426970706586-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e85819c2-ae52-42b3-89d5-426970706586\") " pod="openstack/nova-metadata-0" Dec 08 21:45:52 crc kubenswrapper[4791]: I1208 21:45:52.488977 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e85819c2-ae52-42b3-89d5-426970706586-config-data\") pod \"nova-metadata-0\" (UID: \"e85819c2-ae52-42b3-89d5-426970706586\") " pod="openstack/nova-metadata-0" Dec 08 21:45:52 crc kubenswrapper[4791]: I1208 21:45:52.494327 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/e85819c2-ae52-42b3-89d5-426970706586-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"e85819c2-ae52-42b3-89d5-426970706586\") " pod="openstack/nova-metadata-0" Dec 08 21:45:52 crc kubenswrapper[4791]: I1208 21:45:52.498171 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-szx8c\" (UniqueName: \"kubernetes.io/projected/e85819c2-ae52-42b3-89d5-426970706586-kube-api-access-szx8c\") pod \"nova-metadata-0\" (UID: \"e85819c2-ae52-42b3-89d5-426970706586\") " pod="openstack/nova-metadata-0" Dec 08 21:45:52 crc 
kubenswrapper[4791]: I1208 21:45:52.533380 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 08 21:45:52 crc kubenswrapper[4791]: I1208 21:45:52.767490 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"cafa4036-e745-4f13-abd0-06e498ef4bcc","Type":"ContainerStarted","Data":"d2afce3d717edc394508f969656c2cda890b1427b2ac8e377156d0f9f191cc80"} Dec 08 21:45:52 crc kubenswrapper[4791]: I1208 21:45:52.798685 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.798663572 podStartE2EDuration="2.798663572s" podCreationTimestamp="2025-12-08 21:45:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:45:52.787510996 +0000 UTC m=+1629.486269351" watchObservedRunningTime="2025-12-08 21:45:52.798663572 +0000 UTC m=+1629.497421917" Dec 08 21:45:53 crc kubenswrapper[4791]: I1208 21:45:53.029914 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 08 21:45:53 crc kubenswrapper[4791]: W1208 21:45:53.050543 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode85819c2_ae52_42b3_89d5_426970706586.slice/crio-1b92f2083a02cfd28ed4e31ad875d7a0228d4cc87123863a1931443f971ac4e0 WatchSource:0}: Error finding container 1b92f2083a02cfd28ed4e31ad875d7a0228d4cc87123863a1931443f971ac4e0: Status 404 returned error can't find the container with id 1b92f2083a02cfd28ed4e31ad875d7a0228d4cc87123863a1931443f971ac4e0 Dec 08 21:45:53 crc kubenswrapper[4791]: I1208 21:45:53.621904 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4aecb1a7-b3e4-40b9-baa9-3daa133d63f3" path="/var/lib/kubelet/pods/4aecb1a7-b3e4-40b9-baa9-3daa133d63f3/volumes" Dec 08 21:45:53 crc kubenswrapper[4791]: I1208 21:45:53.765644 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 08 21:45:53 crc kubenswrapper[4791]: I1208 21:45:53.805750 4791 generic.go:334] "Generic (PLEG): container finished" podID="361b4085-e6a8-45c8-b172-fe0fc321ca17" containerID="376c8a519bb2097c2d2d3f063fbab284a521d60521c6c5d3b3c78557cfa06a00" exitCode=0 Dec 08 21:45:53 crc kubenswrapper[4791]: I1208 21:45:53.805827 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"361b4085-e6a8-45c8-b172-fe0fc321ca17","Type":"ContainerDied","Data":"376c8a519bb2097c2d2d3f063fbab284a521d60521c6c5d3b3c78557cfa06a00"} Dec 08 21:45:53 crc kubenswrapper[4791]: I1208 21:45:53.805855 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"361b4085-e6a8-45c8-b172-fe0fc321ca17","Type":"ContainerDied","Data":"45058b4b6dd9d772ff6ca2cf5781cc2e8ca84bfab71a812f8e0e77a2699740de"} Dec 08 21:45:53 crc kubenswrapper[4791]: I1208 21:45:53.805877 4791 scope.go:117] "RemoveContainer" containerID="376c8a519bb2097c2d2d3f063fbab284a521d60521c6c5d3b3c78557cfa06a00" Dec 08 21:45:53 crc kubenswrapper[4791]: I1208 21:45:53.806018 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 08 21:45:53 crc kubenswrapper[4791]: I1208 21:45:53.814098 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e85819c2-ae52-42b3-89d5-426970706586","Type":"ContainerStarted","Data":"9340f4f647939108eae0be39baa4d72e556cbf0d7ca53e933bc5302cca1fec3f"} Dec 08 21:45:53 crc kubenswrapper[4791]: I1208 21:45:53.814150 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e85819c2-ae52-42b3-89d5-426970706586","Type":"ContainerStarted","Data":"719c2fb326017a576746c6fc87bd34c850a687d14ce46ee09d3907bcd8cf8186"} Dec 08 21:45:53 crc kubenswrapper[4791]: I1208 21:45:53.814166 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e85819c2-ae52-42b3-89d5-426970706586","Type":"ContainerStarted","Data":"1b92f2083a02cfd28ed4e31ad875d7a0228d4cc87123863a1931443f971ac4e0"} Dec 08 21:45:53 crc kubenswrapper[4791]: I1208 21:45:53.863602 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=1.863572833 podStartE2EDuration="1.863572833s" podCreationTimestamp="2025-12-08 21:45:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:45:53.838724017 +0000 UTC m=+1630.537482372" watchObservedRunningTime="2025-12-08 21:45:53.863572833 +0000 UTC m=+1630.562331178" Dec 08 21:45:53 crc kubenswrapper[4791]: I1208 21:45:53.864024 4791 scope.go:117] "RemoveContainer" containerID="69cffc6c29d3db8e3943d6e1a570667a1a4a48654124d32fcb8a8ca693ef02cb" Dec 08 21:45:53 crc kubenswrapper[4791]: I1208 21:45:53.895637 4791 scope.go:117] "RemoveContainer" containerID="376c8a519bb2097c2d2d3f063fbab284a521d60521c6c5d3b3c78557cfa06a00" Dec 08 21:45:53 crc kubenswrapper[4791]: E1208 21:45:53.896794 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"376c8a519bb2097c2d2d3f063fbab284a521d60521c6c5d3b3c78557cfa06a00\": container with ID starting with 376c8a519bb2097c2d2d3f063fbab284a521d60521c6c5d3b3c78557cfa06a00 not found: ID does not exist" containerID="376c8a519bb2097c2d2d3f063fbab284a521d60521c6c5d3b3c78557cfa06a00" Dec 08 21:45:53 crc kubenswrapper[4791]: I1208 21:45:53.896851 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"376c8a519bb2097c2d2d3f063fbab284a521d60521c6c5d3b3c78557cfa06a00"} err="failed to get container status \"376c8a519bb2097c2d2d3f063fbab284a521d60521c6c5d3b3c78557cfa06a00\": rpc error: code = NotFound desc = could not find container \"376c8a519bb2097c2d2d3f063fbab284a521d60521c6c5d3b3c78557cfa06a00\": container with ID starting with 376c8a519bb2097c2d2d3f063fbab284a521d60521c6c5d3b3c78557cfa06a00 not found: ID does not exist" Dec 08 21:45:53 crc kubenswrapper[4791]: I1208 21:45:53.896930 4791 scope.go:117] "RemoveContainer" containerID="69cffc6c29d3db8e3943d6e1a570667a1a4a48654124d32fcb8a8ca693ef02cb" Dec 08 21:45:53 crc kubenswrapper[4791]: E1208 21:45:53.900052 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"69cffc6c29d3db8e3943d6e1a570667a1a4a48654124d32fcb8a8ca693ef02cb\": container with ID starting with 69cffc6c29d3db8e3943d6e1a570667a1a4a48654124d32fcb8a8ca693ef02cb not found: ID does not exist" 
containerID="69cffc6c29d3db8e3943d6e1a570667a1a4a48654124d32fcb8a8ca693ef02cb" Dec 08 21:45:53 crc kubenswrapper[4791]: I1208 21:45:53.900114 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"69cffc6c29d3db8e3943d6e1a570667a1a4a48654124d32fcb8a8ca693ef02cb"} err="failed to get container status \"69cffc6c29d3db8e3943d6e1a570667a1a4a48654124d32fcb8a8ca693ef02cb\": rpc error: code = NotFound desc = could not find container \"69cffc6c29d3db8e3943d6e1a570667a1a4a48654124d32fcb8a8ca693ef02cb\": container with ID starting with 69cffc6c29d3db8e3943d6e1a570667a1a4a48654124d32fcb8a8ca693ef02cb not found: ID does not exist" Dec 08 21:45:53 crc kubenswrapper[4791]: I1208 21:45:53.918407 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/361b4085-e6a8-45c8-b172-fe0fc321ca17-public-tls-certs\") pod \"361b4085-e6a8-45c8-b172-fe0fc321ca17\" (UID: \"361b4085-e6a8-45c8-b172-fe0fc321ca17\") " Dec 08 21:45:53 crc kubenswrapper[4791]: I1208 21:45:53.918563 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dhm72\" (UniqueName: \"kubernetes.io/projected/361b4085-e6a8-45c8-b172-fe0fc321ca17-kube-api-access-dhm72\") pod \"361b4085-e6a8-45c8-b172-fe0fc321ca17\" (UID: \"361b4085-e6a8-45c8-b172-fe0fc321ca17\") " Dec 08 21:45:53 crc kubenswrapper[4791]: I1208 21:45:53.918608 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/361b4085-e6a8-45c8-b172-fe0fc321ca17-internal-tls-certs\") pod \"361b4085-e6a8-45c8-b172-fe0fc321ca17\" (UID: \"361b4085-e6a8-45c8-b172-fe0fc321ca17\") " Dec 08 21:45:53 crc kubenswrapper[4791]: I1208 21:45:53.918731 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/361b4085-e6a8-45c8-b172-fe0fc321ca17-config-data\") pod \"361b4085-e6a8-45c8-b172-fe0fc321ca17\" (UID: \"361b4085-e6a8-45c8-b172-fe0fc321ca17\") " Dec 08 21:45:53 crc kubenswrapper[4791]: I1208 21:45:53.918767 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/361b4085-e6a8-45c8-b172-fe0fc321ca17-logs\") pod \"361b4085-e6a8-45c8-b172-fe0fc321ca17\" (UID: \"361b4085-e6a8-45c8-b172-fe0fc321ca17\") " Dec 08 21:45:53 crc kubenswrapper[4791]: I1208 21:45:53.918880 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/361b4085-e6a8-45c8-b172-fe0fc321ca17-combined-ca-bundle\") pod \"361b4085-e6a8-45c8-b172-fe0fc321ca17\" (UID: \"361b4085-e6a8-45c8-b172-fe0fc321ca17\") " Dec 08 21:45:53 crc kubenswrapper[4791]: I1208 21:45:53.920103 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/361b4085-e6a8-45c8-b172-fe0fc321ca17-logs" (OuterVolumeSpecName: "logs") pod "361b4085-e6a8-45c8-b172-fe0fc321ca17" (UID: "361b4085-e6a8-45c8-b172-fe0fc321ca17"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:45:53 crc kubenswrapper[4791]: I1208 21:45:53.922975 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/361b4085-e6a8-45c8-b172-fe0fc321ca17-kube-api-access-dhm72" (OuterVolumeSpecName: "kube-api-access-dhm72") pod "361b4085-e6a8-45c8-b172-fe0fc321ca17" (UID: "361b4085-e6a8-45c8-b172-fe0fc321ca17"). InnerVolumeSpecName "kube-api-access-dhm72". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:45:53 crc kubenswrapper[4791]: I1208 21:45:53.953414 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/361b4085-e6a8-45c8-b172-fe0fc321ca17-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "361b4085-e6a8-45c8-b172-fe0fc321ca17" (UID: "361b4085-e6a8-45c8-b172-fe0fc321ca17"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:45:53 crc kubenswrapper[4791]: I1208 21:45:53.957834 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/361b4085-e6a8-45c8-b172-fe0fc321ca17-config-data" (OuterVolumeSpecName: "config-data") pod "361b4085-e6a8-45c8-b172-fe0fc321ca17" (UID: "361b4085-e6a8-45c8-b172-fe0fc321ca17"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:45:53 crc kubenswrapper[4791]: I1208 21:45:53.977163 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/361b4085-e6a8-45c8-b172-fe0fc321ca17-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "361b4085-e6a8-45c8-b172-fe0fc321ca17" (UID: "361b4085-e6a8-45c8-b172-fe0fc321ca17"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:45:54 crc kubenswrapper[4791]: I1208 21:45:54.003822 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/361b4085-e6a8-45c8-b172-fe0fc321ca17-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "361b4085-e6a8-45c8-b172-fe0fc321ca17" (UID: "361b4085-e6a8-45c8-b172-fe0fc321ca17"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:45:54 crc kubenswrapper[4791]: I1208 21:45:54.028704 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dhm72\" (UniqueName: \"kubernetes.io/projected/361b4085-e6a8-45c8-b172-fe0fc321ca17-kube-api-access-dhm72\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:54 crc kubenswrapper[4791]: I1208 21:45:54.028946 4791 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/361b4085-e6a8-45c8-b172-fe0fc321ca17-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:54 crc kubenswrapper[4791]: I1208 21:45:54.029018 4791 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/361b4085-e6a8-45c8-b172-fe0fc321ca17-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:54 crc kubenswrapper[4791]: I1208 21:45:54.029088 4791 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/361b4085-e6a8-45c8-b172-fe0fc321ca17-logs\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:54 crc kubenswrapper[4791]: I1208 21:45:54.029190 4791 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/361b4085-e6a8-45c8-b172-fe0fc321ca17-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:54 crc kubenswrapper[4791]: I1208 21:45:54.029264 4791 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/361b4085-e6a8-45c8-b172-fe0fc321ca17-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:54 crc kubenswrapper[4791]: I1208 21:45:54.211022 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 08 21:45:54 crc kubenswrapper[4791]: I1208 21:45:54.225503 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 08 21:45:54 crc kubenswrapper[4791]: I1208 21:45:54.247421 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 08 21:45:54 crc kubenswrapper[4791]: E1208 21:45:54.248012 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="361b4085-e6a8-45c8-b172-fe0fc321ca17" containerName="nova-api-log" Dec 08 21:45:54 crc kubenswrapper[4791]: I1208 21:45:54.248031 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="361b4085-e6a8-45c8-b172-fe0fc321ca17" containerName="nova-api-log" Dec 08 21:45:54 crc kubenswrapper[4791]: E1208 21:45:54.248068 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="361b4085-e6a8-45c8-b172-fe0fc321ca17" containerName="nova-api-api" Dec 08 21:45:54 crc kubenswrapper[4791]: I1208 21:45:54.248075 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="361b4085-e6a8-45c8-b172-fe0fc321ca17" containerName="nova-api-api" Dec 08 21:45:54 crc kubenswrapper[4791]: I1208 21:45:54.248304 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="361b4085-e6a8-45c8-b172-fe0fc321ca17" containerName="nova-api-log" Dec 08 21:45:54 crc kubenswrapper[4791]: I1208 21:45:54.248319 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="361b4085-e6a8-45c8-b172-fe0fc321ca17" containerName="nova-api-api" Dec 08 21:45:54 crc kubenswrapper[4791]: I1208 21:45:54.249655 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 08 21:45:54 crc kubenswrapper[4791]: I1208 21:45:54.255247 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 08 21:45:54 crc kubenswrapper[4791]: I1208 21:45:54.255546 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 08 21:45:54 crc kubenswrapper[4791]: I1208 21:45:54.255880 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 08 21:45:54 crc kubenswrapper[4791]: I1208 21:45:54.306728 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 08 21:45:54 crc kubenswrapper[4791]: I1208 21:45:54.437908 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6371760-9057-4d77-9e38-3a8523adb28f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f6371760-9057-4d77-9e38-3a8523adb28f\") " pod="openstack/nova-api-0" Dec 08 21:45:54 crc kubenswrapper[4791]: I1208 21:45:54.437978 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7dh2m\" (UniqueName: \"kubernetes.io/projected/f6371760-9057-4d77-9e38-3a8523adb28f-kube-api-access-7dh2m\") pod \"nova-api-0\" (UID: \"f6371760-9057-4d77-9e38-3a8523adb28f\") " pod="openstack/nova-api-0" Dec 08 21:45:54 crc kubenswrapper[4791]: I1208 21:45:54.438017 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6371760-9057-4d77-9e38-3a8523adb28f-config-data\") pod \"nova-api-0\" (UID: \"f6371760-9057-4d77-9e38-3a8523adb28f\") " pod="openstack/nova-api-0" Dec 08 21:45:54 crc kubenswrapper[4791]: I1208 21:45:54.438636 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f6371760-9057-4d77-9e38-3a8523adb28f-logs\") pod \"nova-api-0\" (UID: \"f6371760-9057-4d77-9e38-3a8523adb28f\") " pod="openstack/nova-api-0" Dec 08 21:45:54 crc kubenswrapper[4791]: I1208 21:45:54.438779 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f6371760-9057-4d77-9e38-3a8523adb28f-internal-tls-certs\") pod \"nova-api-0\" (UID: \"f6371760-9057-4d77-9e38-3a8523adb28f\") " pod="openstack/nova-api-0" Dec 08 21:45:54 crc kubenswrapper[4791]: I1208 21:45:54.438898 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f6371760-9057-4d77-9e38-3a8523adb28f-public-tls-certs\") pod \"nova-api-0\" (UID: \"f6371760-9057-4d77-9e38-3a8523adb28f\") " pod="openstack/nova-api-0" Dec 08 21:45:54 crc kubenswrapper[4791]: I1208 21:45:54.540573 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f6371760-9057-4d77-9e38-3a8523adb28f-logs\") pod \"nova-api-0\" (UID: \"f6371760-9057-4d77-9e38-3a8523adb28f\") " pod="openstack/nova-api-0" Dec 08 21:45:54 crc kubenswrapper[4791]: I1208 21:45:54.540636 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f6371760-9057-4d77-9e38-3a8523adb28f-internal-tls-certs\") pod \"nova-api-0\" (UID: 
\"f6371760-9057-4d77-9e38-3a8523adb28f\") " pod="openstack/nova-api-0" Dec 08 21:45:54 crc kubenswrapper[4791]: I1208 21:45:54.540694 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f6371760-9057-4d77-9e38-3a8523adb28f-public-tls-certs\") pod \"nova-api-0\" (UID: \"f6371760-9057-4d77-9e38-3a8523adb28f\") " pod="openstack/nova-api-0" Dec 08 21:45:54 crc kubenswrapper[4791]: I1208 21:45:54.540768 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6371760-9057-4d77-9e38-3a8523adb28f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f6371760-9057-4d77-9e38-3a8523adb28f\") " pod="openstack/nova-api-0" Dec 08 21:45:54 crc kubenswrapper[4791]: I1208 21:45:54.540801 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7dh2m\" (UniqueName: \"kubernetes.io/projected/f6371760-9057-4d77-9e38-3a8523adb28f-kube-api-access-7dh2m\") pod \"nova-api-0\" (UID: \"f6371760-9057-4d77-9e38-3a8523adb28f\") " pod="openstack/nova-api-0" Dec 08 21:45:54 crc kubenswrapper[4791]: I1208 21:45:54.540854 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6371760-9057-4d77-9e38-3a8523adb28f-config-data\") pod \"nova-api-0\" (UID: \"f6371760-9057-4d77-9e38-3a8523adb28f\") " pod="openstack/nova-api-0" Dec 08 21:45:54 crc kubenswrapper[4791]: I1208 21:45:54.541124 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f6371760-9057-4d77-9e38-3a8523adb28f-logs\") pod \"nova-api-0\" (UID: \"f6371760-9057-4d77-9e38-3a8523adb28f\") " pod="openstack/nova-api-0" Dec 08 21:45:54 crc kubenswrapper[4791]: I1208 21:45:54.545626 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f6371760-9057-4d77-9e38-3a8523adb28f-public-tls-certs\") pod \"nova-api-0\" (UID: \"f6371760-9057-4d77-9e38-3a8523adb28f\") " pod="openstack/nova-api-0" Dec 08 21:45:54 crc kubenswrapper[4791]: I1208 21:45:54.545851 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f6371760-9057-4d77-9e38-3a8523adb28f-internal-tls-certs\") pod \"nova-api-0\" (UID: \"f6371760-9057-4d77-9e38-3a8523adb28f\") " pod="openstack/nova-api-0" Dec 08 21:45:54 crc kubenswrapper[4791]: I1208 21:45:54.548818 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6371760-9057-4d77-9e38-3a8523adb28f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f6371760-9057-4d77-9e38-3a8523adb28f\") " pod="openstack/nova-api-0" Dec 08 21:45:54 crc kubenswrapper[4791]: I1208 21:45:54.550516 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6371760-9057-4d77-9e38-3a8523adb28f-config-data\") pod \"nova-api-0\" (UID: \"f6371760-9057-4d77-9e38-3a8523adb28f\") " pod="openstack/nova-api-0" Dec 08 21:45:54 crc kubenswrapper[4791]: I1208 21:45:54.567571 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7dh2m\" (UniqueName: \"kubernetes.io/projected/f6371760-9057-4d77-9e38-3a8523adb28f-kube-api-access-7dh2m\") pod \"nova-api-0\" (UID: \"f6371760-9057-4d77-9e38-3a8523adb28f\") " 
pod="openstack/nova-api-0" Dec 08 21:45:54 crc kubenswrapper[4791]: I1208 21:45:54.616756 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 08 21:45:55 crc kubenswrapper[4791]: I1208 21:45:55.249870 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 08 21:45:55 crc kubenswrapper[4791]: W1208 21:45:55.252286 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf6371760_9057_4d77_9e38_3a8523adb28f.slice/crio-4c24b442307ef2b44132199013fc1d522b51e1ccff58bd08e7b4cb008ff8f672 WatchSource:0}: Error finding container 4c24b442307ef2b44132199013fc1d522b51e1ccff58bd08e7b4cb008ff8f672: Status 404 returned error can't find the container with id 4c24b442307ef2b44132199013fc1d522b51e1ccff58bd08e7b4cb008ff8f672 Dec 08 21:45:55 crc kubenswrapper[4791]: I1208 21:45:55.611651 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="361b4085-e6a8-45c8-b172-fe0fc321ca17" path="/var/lib/kubelet/pods/361b4085-e6a8-45c8-b172-fe0fc321ca17/volumes" Dec 08 21:45:55 crc kubenswrapper[4791]: I1208 21:45:55.869436 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f6371760-9057-4d77-9e38-3a8523adb28f","Type":"ContainerStarted","Data":"fe227a60202be222a03d7b9a668b429078c462d09521edf5e0240f213e535f32"} Dec 08 21:45:55 crc kubenswrapper[4791]: I1208 21:45:55.869779 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f6371760-9057-4d77-9e38-3a8523adb28f","Type":"ContainerStarted","Data":"1573cd501cec540d6850606dcfe3844495c3edfb5ec73e6c1c2260bb5391aa91"} Dec 08 21:45:55 crc kubenswrapper[4791]: I1208 21:45:55.869792 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f6371760-9057-4d77-9e38-3a8523adb28f","Type":"ContainerStarted","Data":"4c24b442307ef2b44132199013fc1d522b51e1ccff58bd08e7b4cb008ff8f672"} Dec 08 21:45:55 crc kubenswrapper[4791]: I1208 21:45:55.890192 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=1.890171067 podStartE2EDuration="1.890171067s" podCreationTimestamp="2025-12-08 21:45:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:45:55.886360013 +0000 UTC m=+1632.585118368" watchObservedRunningTime="2025-12-08 21:45:55.890171067 +0000 UTC m=+1632.588929402" Dec 08 21:45:56 crc kubenswrapper[4791]: I1208 21:45:56.152361 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 08 21:45:57 crc kubenswrapper[4791]: I1208 21:45:57.533530 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 08 21:45:57 crc kubenswrapper[4791]: I1208 21:45:57.534926 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 08 21:46:01 crc kubenswrapper[4791]: I1208 21:46:01.153199 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 08 21:46:01 crc kubenswrapper[4791]: I1208 21:46:01.208333 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 08 21:46:01 crc kubenswrapper[4791]: I1208 21:46:01.598213 4791 scope.go:117] "RemoveContainer" 
containerID="378e3a5a4f2c0073c5af7e3a66a8b9357a6d3b10e58f0a7f472dd39ed5f18e43" Dec 08 21:46:01 crc kubenswrapper[4791]: E1208 21:46:01.598648 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 21:46:02 crc kubenswrapper[4791]: I1208 21:46:02.000059 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 08 21:46:02 crc kubenswrapper[4791]: I1208 21:46:02.534430 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 08 21:46:02 crc kubenswrapper[4791]: I1208 21:46:02.534746 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 08 21:46:03 crc kubenswrapper[4791]: I1208 21:46:03.548954 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="e85819c2-ae52-42b3-89d5-426970706586" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.233:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 08 21:46:03 crc kubenswrapper[4791]: I1208 21:46:03.548976 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="e85819c2-ae52-42b3-89d5-426970706586" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.233:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 08 21:46:04 crc kubenswrapper[4791]: I1208 21:46:04.618119 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 08 21:46:04 crc kubenswrapper[4791]: I1208 21:46:04.619459 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 08 21:46:05 crc kubenswrapper[4791]: I1208 21:46:05.631951 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="f6371760-9057-4d77-9e38-3a8523adb28f" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.234:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 08 21:46:05 crc kubenswrapper[4791]: I1208 21:46:05.631951 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="f6371760-9057-4d77-9e38-3a8523adb28f" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.234:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 08 21:46:12 crc kubenswrapper[4791]: I1208 21:46:12.541257 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 08 21:46:12 crc kubenswrapper[4791]: I1208 21:46:12.542021 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 08 21:46:12 crc kubenswrapper[4791]: I1208 21:46:12.548872 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 08 21:46:12 crc kubenswrapper[4791]: I1208 21:46:12.549218 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack/nova-metadata-0" Dec 08 21:46:13 crc kubenswrapper[4791]: I1208 21:46:13.607496 4791 scope.go:117] "RemoveContainer" containerID="378e3a5a4f2c0073c5af7e3a66a8b9357a6d3b10e58f0a7f472dd39ed5f18e43" Dec 08 21:46:13 crc kubenswrapper[4791]: E1208 21:46:13.608074 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 21:46:14 crc kubenswrapper[4791]: I1208 21:46:14.625657 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 08 21:46:14 crc kubenswrapper[4791]: I1208 21:46:14.627363 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 08 21:46:14 crc kubenswrapper[4791]: I1208 21:46:14.627882 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 08 21:46:14 crc kubenswrapper[4791]: I1208 21:46:14.632309 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 08 21:46:15 crc kubenswrapper[4791]: I1208 21:46:15.138448 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 08 21:46:15 crc kubenswrapper[4791]: I1208 21:46:15.150099 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 08 21:46:26 crc kubenswrapper[4791]: I1208 21:46:26.598846 4791 scope.go:117] "RemoveContainer" containerID="378e3a5a4f2c0073c5af7e3a66a8b9357a6d3b10e58f0a7f472dd39ed5f18e43" Dec 08 21:46:26 crc kubenswrapper[4791]: E1208 21:46:26.599673 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 21:46:41 crc kubenswrapper[4791]: I1208 21:46:41.598808 4791 scope.go:117] "RemoveContainer" containerID="378e3a5a4f2c0073c5af7e3a66a8b9357a6d3b10e58f0a7f472dd39ed5f18e43" Dec 08 21:46:41 crc kubenswrapper[4791]: E1208 21:46:41.599769 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 21:46:53 crc kubenswrapper[4791]: I1208 21:46:53.607147 4791 scope.go:117] "RemoveContainer" containerID="378e3a5a4f2c0073c5af7e3a66a8b9357a6d3b10e58f0a7f472dd39ed5f18e43" Dec 08 21:46:53 crc kubenswrapper[4791]: E1208 21:46:53.607842 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 21:47:04 crc kubenswrapper[4791]: I1208 21:47:04.598476 4791 scope.go:117] "RemoveContainer" containerID="378e3a5a4f2c0073c5af7e3a66a8b9357a6d3b10e58f0a7f472dd39ed5f18e43" Dec 08 21:47:04 crc kubenswrapper[4791]: E1208 21:47:04.599396 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 21:47:15 crc kubenswrapper[4791]: I1208 21:47:15.598489 4791 scope.go:117] "RemoveContainer" containerID="378e3a5a4f2c0073c5af7e3a66a8b9357a6d3b10e58f0a7f472dd39ed5f18e43" Dec 08 21:47:15 crc kubenswrapper[4791]: E1208 21:47:15.600687 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 21:47:25 crc kubenswrapper[4791]: I1208 21:47:25.883568 4791 generic.go:334] "Generic (PLEG): container finished" podID="bcd8d669-4a40-401d-af99-651b840fb48b" containerID="a55a5aa481da4ab0be91afeeb5b363a463c1bac033f05bd23e2c0231f4f44330" exitCode=1 Dec 08 21:47:25 crc kubenswrapper[4791]: I1208 21:47:25.883629 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" event={"ID":"bcd8d669-4a40-401d-af99-651b840fb48b","Type":"ContainerDied","Data":"a55a5aa481da4ab0be91afeeb5b363a463c1bac033f05bd23e2c0231f4f44330"} Dec 08 21:47:25 crc kubenswrapper[4791]: I1208 21:47:25.884023 4791 scope.go:117] "RemoveContainer" containerID="6aaa3b083eed387233afd8e27c4bf5a9ae6d9cb075ffa437c67d681e065ad29e" Dec 08 21:47:25 crc kubenswrapper[4791]: I1208 21:47:25.884394 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" containerName="manager" probeResult="failure" output="Get \"http://10.217.0.117:8081/readyz\": dial tcp 10.217.0.117:8081: connect: connection refused" Dec 08 21:47:25 crc kubenswrapper[4791]: I1208 21:47:25.884582 4791 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 21:47:25 crc kubenswrapper[4791]: I1208 21:47:25.885195 4791 scope.go:117] "RemoveContainer" containerID="a55a5aa481da4ab0be91afeeb5b363a463c1bac033f05bd23e2c0231f4f44330" Dec 08 21:47:25 crc kubenswrapper[4791]: E1208 21:47:25.885742 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 20s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" 
pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 21:47:26 crc kubenswrapper[4791]: I1208 21:47:26.897246 4791 scope.go:117] "RemoveContainer" containerID="a55a5aa481da4ab0be91afeeb5b363a463c1bac033f05bd23e2c0231f4f44330" Dec 08 21:47:26 crc kubenswrapper[4791]: E1208 21:47:26.897643 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 20s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 21:47:30 crc kubenswrapper[4791]: I1208 21:47:30.598354 4791 scope.go:117] "RemoveContainer" containerID="378e3a5a4f2c0073c5af7e3a66a8b9357a6d3b10e58f0a7f472dd39ed5f18e43" Dec 08 21:47:30 crc kubenswrapper[4791]: E1208 21:47:30.599170 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 21:47:35 crc kubenswrapper[4791]: I1208 21:47:35.884094 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 21:47:35 crc kubenswrapper[4791]: I1208 21:47:35.885478 4791 scope.go:117] "RemoveContainer" containerID="a55a5aa481da4ab0be91afeeb5b363a463c1bac033f05bd23e2c0231f4f44330" Dec 08 21:47:35 crc kubenswrapper[4791]: E1208 21:47:35.885836 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 20s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 21:47:45 crc kubenswrapper[4791]: I1208 21:47:45.598217 4791 scope.go:117] "RemoveContainer" containerID="378e3a5a4f2c0073c5af7e3a66a8b9357a6d3b10e58f0a7f472dd39ed5f18e43" Dec 08 21:47:45 crc kubenswrapper[4791]: E1208 21:47:45.599078 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 21:47:50 crc kubenswrapper[4791]: I1208 21:47:50.598112 4791 scope.go:117] "RemoveContainer" containerID="a55a5aa481da4ab0be91afeeb5b363a463c1bac033f05bd23e2c0231f4f44330" Dec 08 21:47:51 crc kubenswrapper[4791]: I1208 21:47:51.169433 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" event={"ID":"bcd8d669-4a40-401d-af99-651b840fb48b","Type":"ContainerStarted","Data":"72c5b9066b49b4064aa5fce9d4fc4c8a56ca48bc6679405d1fe771c39f2212e4"} 
Dec 08 21:47:51 crc kubenswrapper[4791]: I1208 21:47:51.170061 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 21:47:54 crc kubenswrapper[4791]: I1208 21:47:54.480143 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-qh8xd"] Dec 08 21:47:54 crc kubenswrapper[4791]: I1208 21:47:54.484350 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qh8xd" Dec 08 21:47:54 crc kubenswrapper[4791]: I1208 21:47:54.498012 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qh8xd"] Dec 08 21:47:54 crc kubenswrapper[4791]: I1208 21:47:54.657944 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac6734ae-0444-4576-aa83-49f70e05ade6-catalog-content\") pod \"certified-operators-qh8xd\" (UID: \"ac6734ae-0444-4576-aa83-49f70e05ade6\") " pod="openshift-marketplace/certified-operators-qh8xd" Dec 08 21:47:54 crc kubenswrapper[4791]: I1208 21:47:54.658000 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac6734ae-0444-4576-aa83-49f70e05ade6-utilities\") pod \"certified-operators-qh8xd\" (UID: \"ac6734ae-0444-4576-aa83-49f70e05ade6\") " pod="openshift-marketplace/certified-operators-qh8xd" Dec 08 21:47:54 crc kubenswrapper[4791]: I1208 21:47:54.658381 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hw8ln\" (UniqueName: \"kubernetes.io/projected/ac6734ae-0444-4576-aa83-49f70e05ade6-kube-api-access-hw8ln\") pod \"certified-operators-qh8xd\" (UID: \"ac6734ae-0444-4576-aa83-49f70e05ade6\") " pod="openshift-marketplace/certified-operators-qh8xd" Dec 08 21:47:54 crc kubenswrapper[4791]: I1208 21:47:54.760985 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac6734ae-0444-4576-aa83-49f70e05ade6-catalog-content\") pod \"certified-operators-qh8xd\" (UID: \"ac6734ae-0444-4576-aa83-49f70e05ade6\") " pod="openshift-marketplace/certified-operators-qh8xd" Dec 08 21:47:54 crc kubenswrapper[4791]: I1208 21:47:54.761047 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac6734ae-0444-4576-aa83-49f70e05ade6-utilities\") pod \"certified-operators-qh8xd\" (UID: \"ac6734ae-0444-4576-aa83-49f70e05ade6\") " pod="openshift-marketplace/certified-operators-qh8xd" Dec 08 21:47:54 crc kubenswrapper[4791]: I1208 21:47:54.761155 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hw8ln\" (UniqueName: \"kubernetes.io/projected/ac6734ae-0444-4576-aa83-49f70e05ade6-kube-api-access-hw8ln\") pod \"certified-operators-qh8xd\" (UID: \"ac6734ae-0444-4576-aa83-49f70e05ade6\") " pod="openshift-marketplace/certified-operators-qh8xd" Dec 08 21:47:54 crc kubenswrapper[4791]: I1208 21:47:54.761811 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac6734ae-0444-4576-aa83-49f70e05ade6-catalog-content\") pod \"certified-operators-qh8xd\" (UID: \"ac6734ae-0444-4576-aa83-49f70e05ade6\") " 
pod="openshift-marketplace/certified-operators-qh8xd" Dec 08 21:47:54 crc kubenswrapper[4791]: I1208 21:47:54.761877 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac6734ae-0444-4576-aa83-49f70e05ade6-utilities\") pod \"certified-operators-qh8xd\" (UID: \"ac6734ae-0444-4576-aa83-49f70e05ade6\") " pod="openshift-marketplace/certified-operators-qh8xd" Dec 08 21:47:54 crc kubenswrapper[4791]: I1208 21:47:54.785284 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hw8ln\" (UniqueName: \"kubernetes.io/projected/ac6734ae-0444-4576-aa83-49f70e05ade6-kube-api-access-hw8ln\") pod \"certified-operators-qh8xd\" (UID: \"ac6734ae-0444-4576-aa83-49f70e05ade6\") " pod="openshift-marketplace/certified-operators-qh8xd" Dec 08 21:47:54 crc kubenswrapper[4791]: I1208 21:47:54.834370 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qh8xd" Dec 08 21:47:55 crc kubenswrapper[4791]: I1208 21:47:55.444317 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qh8xd"] Dec 08 21:47:55 crc kubenswrapper[4791]: I1208 21:47:55.887892 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 21:47:56 crc kubenswrapper[4791]: I1208 21:47:56.230362 4791 generic.go:334] "Generic (PLEG): container finished" podID="ac6734ae-0444-4576-aa83-49f70e05ade6" containerID="3f3ddbfd05068b1cf2bde3e51b7070cd36143eac8ebe7d57fa9422944657e24a" exitCode=0 Dec 08 21:47:56 crc kubenswrapper[4791]: I1208 21:47:56.230467 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qh8xd" event={"ID":"ac6734ae-0444-4576-aa83-49f70e05ade6","Type":"ContainerDied","Data":"3f3ddbfd05068b1cf2bde3e51b7070cd36143eac8ebe7d57fa9422944657e24a"} Dec 08 21:47:56 crc kubenswrapper[4791]: I1208 21:47:56.230627 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qh8xd" event={"ID":"ac6734ae-0444-4576-aa83-49f70e05ade6","Type":"ContainerStarted","Data":"19821e003ed7346e51905f68e99dc9069c26fa0d7c48593366ed0154c9a10bc6"} Dec 08 21:47:56 crc kubenswrapper[4791]: I1208 21:47:56.232959 4791 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 08 21:47:59 crc kubenswrapper[4791]: I1208 21:47:59.598308 4791 scope.go:117] "RemoveContainer" containerID="378e3a5a4f2c0073c5af7e3a66a8b9357a6d3b10e58f0a7f472dd39ed5f18e43" Dec 08 21:47:59 crc kubenswrapper[4791]: E1208 21:47:59.599341 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 21:48:01 crc kubenswrapper[4791]: I1208 21:48:01.294909 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qh8xd" event={"ID":"ac6734ae-0444-4576-aa83-49f70e05ade6","Type":"ContainerStarted","Data":"f14aab193ae54ba74e48a72f07d288a4a08cc080df364d116512ba6707551fcf"} Dec 08 21:48:02 crc kubenswrapper[4791]: I1208 21:48:02.308681 4791 
generic.go:334] "Generic (PLEG): container finished" podID="ac6734ae-0444-4576-aa83-49f70e05ade6" containerID="f14aab193ae54ba74e48a72f07d288a4a08cc080df364d116512ba6707551fcf" exitCode=0 Dec 08 21:48:02 crc kubenswrapper[4791]: I1208 21:48:02.308740 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qh8xd" event={"ID":"ac6734ae-0444-4576-aa83-49f70e05ade6","Type":"ContainerDied","Data":"f14aab193ae54ba74e48a72f07d288a4a08cc080df364d116512ba6707551fcf"} Dec 08 21:48:03 crc kubenswrapper[4791]: I1208 21:48:03.365528 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qh8xd" event={"ID":"ac6734ae-0444-4576-aa83-49f70e05ade6","Type":"ContainerStarted","Data":"97036992f4080899ac7e8d3659c971363b4860dcd3f6ad119cd9ad0bb02a8c05"} Dec 08 21:48:03 crc kubenswrapper[4791]: I1208 21:48:03.390675 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-qh8xd" podStartSLOduration=2.9439961500000003 podStartE2EDuration="9.39065686s" podCreationTimestamp="2025-12-08 21:47:54 +0000 UTC" firstStartedPulling="2025-12-08 21:47:56.232760401 +0000 UTC m=+1752.931518746" lastFinishedPulling="2025-12-08 21:48:02.679421111 +0000 UTC m=+1759.378179456" observedRunningTime="2025-12-08 21:48:03.389491381 +0000 UTC m=+1760.088249736" watchObservedRunningTime="2025-12-08 21:48:03.39065686 +0000 UTC m=+1760.089415205" Dec 08 21:48:04 crc kubenswrapper[4791]: I1208 21:48:04.835824 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-qh8xd" Dec 08 21:48:04 crc kubenswrapper[4791]: I1208 21:48:04.835875 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-qh8xd" Dec 08 21:48:04 crc kubenswrapper[4791]: I1208 21:48:04.897891 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-qh8xd" Dec 08 21:48:14 crc kubenswrapper[4791]: I1208 21:48:14.598155 4791 scope.go:117] "RemoveContainer" containerID="378e3a5a4f2c0073c5af7e3a66a8b9357a6d3b10e58f0a7f472dd39ed5f18e43" Dec 08 21:48:14 crc kubenswrapper[4791]: E1208 21:48:14.598741 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 21:48:14 crc kubenswrapper[4791]: I1208 21:48:14.889659 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-qh8xd" Dec 08 21:48:14 crc kubenswrapper[4791]: I1208 21:48:14.978073 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qh8xd"] Dec 08 21:48:15 crc kubenswrapper[4791]: I1208 21:48:15.040558 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hv7kc"] Dec 08 21:48:15 crc kubenswrapper[4791]: I1208 21:48:15.040831 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-hv7kc" podUID="5f13498f-b751-484e-bfd1-8ea09222f482" containerName="registry-server" 
containerID="cri-o://8f536c3aab16a1971f93f7d70f982a5751006a8bb30e7df4919931e23bd4191e" gracePeriod=2 Dec 08 21:48:15 crc kubenswrapper[4791]: I1208 21:48:15.235836 4791 scope.go:117] "RemoveContainer" containerID="8c065d659584282082b219bec1e800a937d6e54e4448977fe6cd1ee556e5e891" Dec 08 21:48:15 crc kubenswrapper[4791]: I1208 21:48:15.500376 4791 generic.go:334] "Generic (PLEG): container finished" podID="5f13498f-b751-484e-bfd1-8ea09222f482" containerID="8f536c3aab16a1971f93f7d70f982a5751006a8bb30e7df4919931e23bd4191e" exitCode=0 Dec 08 21:48:15 crc kubenswrapper[4791]: I1208 21:48:15.501956 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hv7kc" event={"ID":"5f13498f-b751-484e-bfd1-8ea09222f482","Type":"ContainerDied","Data":"8f536c3aab16a1971f93f7d70f982a5751006a8bb30e7df4919931e23bd4191e"} Dec 08 21:48:15 crc kubenswrapper[4791]: I1208 21:48:15.646606 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hv7kc" Dec 08 21:48:15 crc kubenswrapper[4791]: I1208 21:48:15.835014 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f13498f-b751-484e-bfd1-8ea09222f482-utilities\") pod \"5f13498f-b751-484e-bfd1-8ea09222f482\" (UID: \"5f13498f-b751-484e-bfd1-8ea09222f482\") " Dec 08 21:48:15 crc kubenswrapper[4791]: I1208 21:48:15.835116 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vfh4d\" (UniqueName: \"kubernetes.io/projected/5f13498f-b751-484e-bfd1-8ea09222f482-kube-api-access-vfh4d\") pod \"5f13498f-b751-484e-bfd1-8ea09222f482\" (UID: \"5f13498f-b751-484e-bfd1-8ea09222f482\") " Dec 08 21:48:15 crc kubenswrapper[4791]: I1208 21:48:15.835148 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f13498f-b751-484e-bfd1-8ea09222f482-catalog-content\") pod \"5f13498f-b751-484e-bfd1-8ea09222f482\" (UID: \"5f13498f-b751-484e-bfd1-8ea09222f482\") " Dec 08 21:48:15 crc kubenswrapper[4791]: I1208 21:48:15.836311 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f13498f-b751-484e-bfd1-8ea09222f482-utilities" (OuterVolumeSpecName: "utilities") pod "5f13498f-b751-484e-bfd1-8ea09222f482" (UID: "5f13498f-b751-484e-bfd1-8ea09222f482"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:48:15 crc kubenswrapper[4791]: I1208 21:48:15.841476 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f13498f-b751-484e-bfd1-8ea09222f482-kube-api-access-vfh4d" (OuterVolumeSpecName: "kube-api-access-vfh4d") pod "5f13498f-b751-484e-bfd1-8ea09222f482" (UID: "5f13498f-b751-484e-bfd1-8ea09222f482"). InnerVolumeSpecName "kube-api-access-vfh4d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:48:15 crc kubenswrapper[4791]: I1208 21:48:15.891423 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f13498f-b751-484e-bfd1-8ea09222f482-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5f13498f-b751-484e-bfd1-8ea09222f482" (UID: "5f13498f-b751-484e-bfd1-8ea09222f482"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:48:15 crc kubenswrapper[4791]: I1208 21:48:15.944692 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f13498f-b751-484e-bfd1-8ea09222f482-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 21:48:15 crc kubenswrapper[4791]: I1208 21:48:15.945131 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vfh4d\" (UniqueName: \"kubernetes.io/projected/5f13498f-b751-484e-bfd1-8ea09222f482-kube-api-access-vfh4d\") on node \"crc\" DevicePath \"\"" Dec 08 21:48:15 crc kubenswrapper[4791]: I1208 21:48:15.945182 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f13498f-b751-484e-bfd1-8ea09222f482-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 21:48:16 crc kubenswrapper[4791]: I1208 21:48:16.517583 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hv7kc" event={"ID":"5f13498f-b751-484e-bfd1-8ea09222f482","Type":"ContainerDied","Data":"77790f0f558c386e7a0c2c13e5810c876f4df0d3b737fdf5648ee08462c76acd"} Dec 08 21:48:16 crc kubenswrapper[4791]: I1208 21:48:16.517658 4791 scope.go:117] "RemoveContainer" containerID="8f536c3aab16a1971f93f7d70f982a5751006a8bb30e7df4919931e23bd4191e" Dec 08 21:48:16 crc kubenswrapper[4791]: I1208 21:48:16.517939 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hv7kc" Dec 08 21:48:16 crc kubenswrapper[4791]: I1208 21:48:16.574508 4791 scope.go:117] "RemoveContainer" containerID="13308b864af5eaddab524110c185d0f7e3e03872bd6166197438c0f467042a18" Dec 08 21:48:16 crc kubenswrapper[4791]: I1208 21:48:16.593907 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hv7kc"] Dec 08 21:48:16 crc kubenswrapper[4791]: I1208 21:48:16.604119 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-hv7kc"] Dec 08 21:48:16 crc kubenswrapper[4791]: I1208 21:48:16.678068 4791 scope.go:117] "RemoveContainer" containerID="13cba54449fb93ad0da3c0d93d5ab707b4971d48a64486e46b6ba1036aac106c" Dec 08 21:48:17 crc kubenswrapper[4791]: I1208 21:48:17.610682 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f13498f-b751-484e-bfd1-8ea09222f482" path="/var/lib/kubelet/pods/5f13498f-b751-484e-bfd1-8ea09222f482/volumes" Dec 08 21:48:25 crc kubenswrapper[4791]: I1208 21:48:25.185775 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Dec 08 21:48:25 crc kubenswrapper[4791]: I1208 21:48:25.186606 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="79f21b28-25e8-4260-a133-910ab353ed8c" containerName="openstackclient" containerID="cri-o://ae8fc77d564d7e978eb8595534e08e2e8c13bbb39688276f799efc8b27583a82" gracePeriod=2 Dec 08 21:48:25 crc kubenswrapper[4791]: I1208 21:48:25.198318 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Dec 08 21:48:25 crc kubenswrapper[4791]: I1208 21:48:25.224630 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 08 21:48:25 crc kubenswrapper[4791]: E1208 21:48:25.225157 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f13498f-b751-484e-bfd1-8ea09222f482" containerName="registry-server" Dec 08 21:48:25 crc 
kubenswrapper[4791]: I1208 21:48:25.225174 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f13498f-b751-484e-bfd1-8ea09222f482" containerName="registry-server" Dec 08 21:48:25 crc kubenswrapper[4791]: E1208 21:48:25.225207 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79f21b28-25e8-4260-a133-910ab353ed8c" containerName="openstackclient" Dec 08 21:48:25 crc kubenswrapper[4791]: I1208 21:48:25.225216 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="79f21b28-25e8-4260-a133-910ab353ed8c" containerName="openstackclient" Dec 08 21:48:25 crc kubenswrapper[4791]: E1208 21:48:25.225228 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f13498f-b751-484e-bfd1-8ea09222f482" containerName="extract-content" Dec 08 21:48:25 crc kubenswrapper[4791]: I1208 21:48:25.225234 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f13498f-b751-484e-bfd1-8ea09222f482" containerName="extract-content" Dec 08 21:48:25 crc kubenswrapper[4791]: E1208 21:48:25.225256 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f13498f-b751-484e-bfd1-8ea09222f482" containerName="extract-utilities" Dec 08 21:48:25 crc kubenswrapper[4791]: I1208 21:48:25.225262 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f13498f-b751-484e-bfd1-8ea09222f482" containerName="extract-utilities" Dec 08 21:48:25 crc kubenswrapper[4791]: I1208 21:48:25.225478 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f13498f-b751-484e-bfd1-8ea09222f482" containerName="registry-server" Dec 08 21:48:25 crc kubenswrapper[4791]: I1208 21:48:25.225493 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="79f21b28-25e8-4260-a133-910ab353ed8c" containerName="openstackclient" Dec 08 21:48:25 crc kubenswrapper[4791]: I1208 21:48:25.226351 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 08 21:48:25 crc kubenswrapper[4791]: I1208 21:48:25.229328 4791 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="79f21b28-25e8-4260-a133-910ab353ed8c" podUID="45a88144-9574-4095-9e4b-3cc3bc138670" Dec 08 21:48:25 crc kubenswrapper[4791]: I1208 21:48:25.237826 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 08 21:48:25 crc kubenswrapper[4791]: I1208 21:48:25.387302 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/45a88144-9574-4095-9e4b-3cc3bc138670-openstack-config\") pod \"openstackclient\" (UID: \"45a88144-9574-4095-9e4b-3cc3bc138670\") " pod="openstack/openstackclient" Dec 08 21:48:25 crc kubenswrapper[4791]: I1208 21:48:25.387739 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/45a88144-9574-4095-9e4b-3cc3bc138670-openstack-config-secret\") pod \"openstackclient\" (UID: \"45a88144-9574-4095-9e4b-3cc3bc138670\") " pod="openstack/openstackclient" Dec 08 21:48:25 crc kubenswrapper[4791]: I1208 21:48:25.387987 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kngft\" (UniqueName: \"kubernetes.io/projected/45a88144-9574-4095-9e4b-3cc3bc138670-kube-api-access-kngft\") pod \"openstackclient\" (UID: \"45a88144-9574-4095-9e4b-3cc3bc138670\") " pod="openstack/openstackclient" Dec 08 21:48:25 crc kubenswrapper[4791]: I1208 21:48:25.388051 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45a88144-9574-4095-9e4b-3cc3bc138670-combined-ca-bundle\") pod \"openstackclient\" (UID: \"45a88144-9574-4095-9e4b-3cc3bc138670\") " pod="openstack/openstackclient" Dec 08 21:48:25 crc kubenswrapper[4791]: I1208 21:48:25.489919 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/45a88144-9574-4095-9e4b-3cc3bc138670-openstack-config-secret\") pod \"openstackclient\" (UID: \"45a88144-9574-4095-9e4b-3cc3bc138670\") " pod="openstack/openstackclient" Dec 08 21:48:25 crc kubenswrapper[4791]: I1208 21:48:25.490092 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kngft\" (UniqueName: \"kubernetes.io/projected/45a88144-9574-4095-9e4b-3cc3bc138670-kube-api-access-kngft\") pod \"openstackclient\" (UID: \"45a88144-9574-4095-9e4b-3cc3bc138670\") " pod="openstack/openstackclient" Dec 08 21:48:25 crc kubenswrapper[4791]: I1208 21:48:25.490133 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45a88144-9574-4095-9e4b-3cc3bc138670-combined-ca-bundle\") pod \"openstackclient\" (UID: \"45a88144-9574-4095-9e4b-3cc3bc138670\") " pod="openstack/openstackclient" Dec 08 21:48:25 crc kubenswrapper[4791]: I1208 21:48:25.490220 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/45a88144-9574-4095-9e4b-3cc3bc138670-openstack-config\") pod \"openstackclient\" (UID: \"45a88144-9574-4095-9e4b-3cc3bc138670\") " pod="openstack/openstackclient" Dec 08 
21:48:25 crc kubenswrapper[4791]: I1208 21:48:25.491289 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/45a88144-9574-4095-9e4b-3cc3bc138670-openstack-config\") pod \"openstackclient\" (UID: \"45a88144-9574-4095-9e4b-3cc3bc138670\") " pod="openstack/openstackclient" Dec 08 21:48:25 crc kubenswrapper[4791]: I1208 21:48:25.497475 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/45a88144-9574-4095-9e4b-3cc3bc138670-openstack-config-secret\") pod \"openstackclient\" (UID: \"45a88144-9574-4095-9e4b-3cc3bc138670\") " pod="openstack/openstackclient" Dec 08 21:48:25 crc kubenswrapper[4791]: I1208 21:48:25.497728 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45a88144-9574-4095-9e4b-3cc3bc138670-combined-ca-bundle\") pod \"openstackclient\" (UID: \"45a88144-9574-4095-9e4b-3cc3bc138670\") " pod="openstack/openstackclient" Dec 08 21:48:25 crc kubenswrapper[4791]: I1208 21:48:25.520084 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kngft\" (UniqueName: \"kubernetes.io/projected/45a88144-9574-4095-9e4b-3cc3bc138670-kube-api-access-kngft\") pod \"openstackclient\" (UID: \"45a88144-9574-4095-9e4b-3cc3bc138670\") " pod="openstack/openstackclient" Dec 08 21:48:25 crc kubenswrapper[4791]: I1208 21:48:25.544317 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 08 21:48:26 crc kubenswrapper[4791]: I1208 21:48:26.231185 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 08 21:48:26 crc kubenswrapper[4791]: I1208 21:48:26.741891 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"45a88144-9574-4095-9e4b-3cc3bc138670","Type":"ContainerStarted","Data":"3220f8df3cc0aca8723ada508a0ee80deadcc42742a3d72484aa082c3671b678"} Dec 08 21:48:26 crc kubenswrapper[4791]: I1208 21:48:26.742205 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"45a88144-9574-4095-9e4b-3cc3bc138670","Type":"ContainerStarted","Data":"84cebc9c517a3b466247bb04fb35bc4c8d32b66db9b245ead7b055d44be02f7d"} Dec 08 21:48:26 crc kubenswrapper[4791]: I1208 21:48:26.774753 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=1.77469142 podStartE2EDuration="1.77469142s" podCreationTimestamp="2025-12-08 21:48:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:48:26.759573395 +0000 UTC m=+1783.458331750" watchObservedRunningTime="2025-12-08 21:48:26.77469142 +0000 UTC m=+1783.473449765" Dec 08 21:48:27 crc kubenswrapper[4791]: I1208 21:48:27.572785 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 08 21:48:27 crc kubenswrapper[4791]: I1208 21:48:27.754073 4791 generic.go:334] "Generic (PLEG): container finished" podID="79f21b28-25e8-4260-a133-910ab353ed8c" containerID="ae8fc77d564d7e978eb8595534e08e2e8c13bbb39688276f799efc8b27583a82" exitCode=137 Dec 08 21:48:27 crc kubenswrapper[4791]: I1208 21:48:27.755265 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7twqc\" (UniqueName: \"kubernetes.io/projected/79f21b28-25e8-4260-a133-910ab353ed8c-kube-api-access-7twqc\") pod \"79f21b28-25e8-4260-a133-910ab353ed8c\" (UID: \"79f21b28-25e8-4260-a133-910ab353ed8c\") " Dec 08 21:48:27 crc kubenswrapper[4791]: I1208 21:48:27.755298 4791 scope.go:117] "RemoveContainer" containerID="ae8fc77d564d7e978eb8595534e08e2e8c13bbb39688276f799efc8b27583a82" Dec 08 21:48:27 crc kubenswrapper[4791]: I1208 21:48:27.755306 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/79f21b28-25e8-4260-a133-910ab353ed8c-openstack-config-secret\") pod \"79f21b28-25e8-4260-a133-910ab353ed8c\" (UID: \"79f21b28-25e8-4260-a133-910ab353ed8c\") " Dec 08 21:48:27 crc kubenswrapper[4791]: I1208 21:48:27.755422 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79f21b28-25e8-4260-a133-910ab353ed8c-combined-ca-bundle\") pod \"79f21b28-25e8-4260-a133-910ab353ed8c\" (UID: \"79f21b28-25e8-4260-a133-910ab353ed8c\") " Dec 08 21:48:27 crc kubenswrapper[4791]: I1208 21:48:27.755642 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/79f21b28-25e8-4260-a133-910ab353ed8c-openstack-config\") pod \"79f21b28-25e8-4260-a133-910ab353ed8c\" (UID: \"79f21b28-25e8-4260-a133-910ab353ed8c\") " Dec 08 21:48:27 crc kubenswrapper[4791]: I1208 21:48:27.755273 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 08 21:48:27 crc kubenswrapper[4791]: I1208 21:48:27.761781 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79f21b28-25e8-4260-a133-910ab353ed8c-kube-api-access-7twqc" (OuterVolumeSpecName: "kube-api-access-7twqc") pod "79f21b28-25e8-4260-a133-910ab353ed8c" (UID: "79f21b28-25e8-4260-a133-910ab353ed8c"). InnerVolumeSpecName "kube-api-access-7twqc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:48:27 crc kubenswrapper[4791]: I1208 21:48:27.787500 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79f21b28-25e8-4260-a133-910ab353ed8c-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "79f21b28-25e8-4260-a133-910ab353ed8c" (UID: "79f21b28-25e8-4260-a133-910ab353ed8c"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:48:27 crc kubenswrapper[4791]: I1208 21:48:27.791242 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79f21b28-25e8-4260-a133-910ab353ed8c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "79f21b28-25e8-4260-a133-910ab353ed8c" (UID: "79f21b28-25e8-4260-a133-910ab353ed8c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:48:27 crc kubenswrapper[4791]: I1208 21:48:27.816915 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79f21b28-25e8-4260-a133-910ab353ed8c-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "79f21b28-25e8-4260-a133-910ab353ed8c" (UID: "79f21b28-25e8-4260-a133-910ab353ed8c"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:48:27 crc kubenswrapper[4791]: I1208 21:48:27.858318 4791 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/79f21b28-25e8-4260-a133-910ab353ed8c-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:48:27 crc kubenswrapper[4791]: I1208 21:48:27.858353 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7twqc\" (UniqueName: \"kubernetes.io/projected/79f21b28-25e8-4260-a133-910ab353ed8c-kube-api-access-7twqc\") on node \"crc\" DevicePath \"\"" Dec 08 21:48:27 crc kubenswrapper[4791]: I1208 21:48:27.858364 4791 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/79f21b28-25e8-4260-a133-910ab353ed8c-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 08 21:48:27 crc kubenswrapper[4791]: I1208 21:48:27.858374 4791 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79f21b28-25e8-4260-a133-910ab353ed8c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:48:27 crc kubenswrapper[4791]: I1208 21:48:27.906195 4791 scope.go:117] "RemoveContainer" containerID="ae8fc77d564d7e978eb8595534e08e2e8c13bbb39688276f799efc8b27583a82" Dec 08 21:48:27 crc kubenswrapper[4791]: E1208 21:48:27.906690 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae8fc77d564d7e978eb8595534e08e2e8c13bbb39688276f799efc8b27583a82\": container with ID starting with ae8fc77d564d7e978eb8595534e08e2e8c13bbb39688276f799efc8b27583a82 not found: ID does not exist" containerID="ae8fc77d564d7e978eb8595534e08e2e8c13bbb39688276f799efc8b27583a82" Dec 08 21:48:27 crc kubenswrapper[4791]: I1208 21:48:27.906749 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae8fc77d564d7e978eb8595534e08e2e8c13bbb39688276f799efc8b27583a82"} err="failed to get container status \"ae8fc77d564d7e978eb8595534e08e2e8c13bbb39688276f799efc8b27583a82\": rpc error: code = NotFound desc = could not find container \"ae8fc77d564d7e978eb8595534e08e2e8c13bbb39688276f799efc8b27583a82\": container with ID starting with ae8fc77d564d7e978eb8595534e08e2e8c13bbb39688276f799efc8b27583a82 not found: ID does not exist" Dec 08 21:48:28 crc kubenswrapper[4791]: I1208 21:48:28.598793 4791 scope.go:117] "RemoveContainer" containerID="378e3a5a4f2c0073c5af7e3a66a8b9357a6d3b10e58f0a7f472dd39ed5f18e43" Dec 08 21:48:28 crc kubenswrapper[4791]: E1208 21:48:28.599128 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 21:48:29 crc 
kubenswrapper[4791]: I1208 21:48:29.613168 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="79f21b28-25e8-4260-a133-910ab353ed8c" path="/var/lib/kubelet/pods/79f21b28-25e8-4260-a133-910ab353ed8c/volumes" Dec 08 21:48:40 crc kubenswrapper[4791]: I1208 21:48:40.598107 4791 scope.go:117] "RemoveContainer" containerID="378e3a5a4f2c0073c5af7e3a66a8b9357a6d3b10e58f0a7f472dd39ed5f18e43" Dec 08 21:48:40 crc kubenswrapper[4791]: E1208 21:48:40.599047 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 21:48:55 crc kubenswrapper[4791]: I1208 21:48:55.598397 4791 scope.go:117] "RemoveContainer" containerID="378e3a5a4f2c0073c5af7e3a66a8b9357a6d3b10e58f0a7f472dd39ed5f18e43" Dec 08 21:48:55 crc kubenswrapper[4791]: E1208 21:48:55.599484 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 21:49:08 crc kubenswrapper[4791]: I1208 21:49:08.598697 4791 scope.go:117] "RemoveContainer" containerID="378e3a5a4f2c0073c5af7e3a66a8b9357a6d3b10e58f0a7f472dd39ed5f18e43" Dec 08 21:49:08 crc kubenswrapper[4791]: E1208 21:49:08.599619 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 21:49:15 crc kubenswrapper[4791]: I1208 21:49:15.339080 4791 scope.go:117] "RemoveContainer" containerID="28f3ae21f34970db2a35f9c59baa5bce65552ee05ceab28549c32f60c93f9456" Dec 08 21:49:20 crc kubenswrapper[4791]: I1208 21:49:20.598648 4791 scope.go:117] "RemoveContainer" containerID="378e3a5a4f2c0073c5af7e3a66a8b9357a6d3b10e58f0a7f472dd39ed5f18e43" Dec 08 21:49:20 crc kubenswrapper[4791]: E1208 21:49:20.600120 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 21:49:35 crc kubenswrapper[4791]: I1208 21:49:35.598385 4791 scope.go:117] "RemoveContainer" containerID="378e3a5a4f2c0073c5af7e3a66a8b9357a6d3b10e58f0a7f472dd39ed5f18e43" Dec 08 21:49:35 crc kubenswrapper[4791]: E1208 21:49:35.599256 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 21:49:50 crc kubenswrapper[4791]: I1208 21:49:50.598903 4791 scope.go:117] "RemoveContainer" containerID="378e3a5a4f2c0073c5af7e3a66a8b9357a6d3b10e58f0a7f472dd39ed5f18e43" Dec 08 21:49:50 crc kubenswrapper[4791]: E1208 21:49:50.600057 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 21:50:03 crc kubenswrapper[4791]: I1208 21:50:03.609929 4791 scope.go:117] "RemoveContainer" containerID="378e3a5a4f2c0073c5af7e3a66a8b9357a6d3b10e58f0a7f472dd39ed5f18e43" Dec 08 21:50:03 crc kubenswrapper[4791]: E1208 21:50:03.610747 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 21:50:15 crc kubenswrapper[4791]: I1208 21:50:15.598198 4791 scope.go:117] "RemoveContainer" containerID="378e3a5a4f2c0073c5af7e3a66a8b9357a6d3b10e58f0a7f472dd39ed5f18e43" Dec 08 21:50:15 crc kubenswrapper[4791]: E1208 21:50:15.599230 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 21:50:24 crc kubenswrapper[4791]: I1208 21:50:24.196693 4791 generic.go:334] "Generic (PLEG): container finished" podID="bcd8d669-4a40-401d-af99-651b840fb48b" containerID="72c5b9066b49b4064aa5fce9d4fc4c8a56ca48bc6679405d1fe771c39f2212e4" exitCode=1 Dec 08 21:50:24 crc kubenswrapper[4791]: I1208 21:50:24.196795 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" event={"ID":"bcd8d669-4a40-401d-af99-651b840fb48b","Type":"ContainerDied","Data":"72c5b9066b49b4064aa5fce9d4fc4c8a56ca48bc6679405d1fe771c39f2212e4"} Dec 08 21:50:24 crc kubenswrapper[4791]: I1208 21:50:24.197322 4791 scope.go:117] "RemoveContainer" containerID="a55a5aa481da4ab0be91afeeb5b363a463c1bac033f05bd23e2c0231f4f44330" Dec 08 21:50:24 crc kubenswrapper[4791]: I1208 21:50:24.199232 4791 scope.go:117] "RemoveContainer" containerID="72c5b9066b49b4064aa5fce9d4fc4c8a56ca48bc6679405d1fe771c39f2212e4" Dec 08 21:50:24 crc kubenswrapper[4791]: E1208 21:50:24.199996 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 40s restarting failed container=manager 
pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 21:50:25 crc kubenswrapper[4791]: I1208 21:50:25.884802 4791 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 21:50:25 crc kubenswrapper[4791]: I1208 21:50:25.885356 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 21:50:25 crc kubenswrapper[4791]: I1208 21:50:25.886532 4791 scope.go:117] "RemoveContainer" containerID="72c5b9066b49b4064aa5fce9d4fc4c8a56ca48bc6679405d1fe771c39f2212e4" Dec 08 21:50:25 crc kubenswrapper[4791]: E1208 21:50:25.887003 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 40s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 21:50:26 crc kubenswrapper[4791]: I1208 21:50:26.598340 4791 scope.go:117] "RemoveContainer" containerID="378e3a5a4f2c0073c5af7e3a66a8b9357a6d3b10e58f0a7f472dd39ed5f18e43" Dec 08 21:50:26 crc kubenswrapper[4791]: E1208 21:50:26.598662 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 21:50:38 crc kubenswrapper[4791]: I1208 21:50:38.598385 4791 scope.go:117] "RemoveContainer" containerID="378e3a5a4f2c0073c5af7e3a66a8b9357a6d3b10e58f0a7f472dd39ed5f18e43" Dec 08 21:50:39 crc kubenswrapper[4791]: I1208 21:50:39.374161 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" event={"ID":"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d","Type":"ContainerStarted","Data":"cb2daff64a39d72de0b926c6af030bdc8c907c270ddf6c6e8fd0e05a72b32d20"} Dec 08 21:50:39 crc kubenswrapper[4791]: I1208 21:50:39.598324 4791 scope.go:117] "RemoveContainer" containerID="72c5b9066b49b4064aa5fce9d4fc4c8a56ca48bc6679405d1fe771c39f2212e4" Dec 08 21:50:39 crc kubenswrapper[4791]: E1208 21:50:39.598631 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 40s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 21:50:50 crc kubenswrapper[4791]: I1208 21:50:50.597869 4791 scope.go:117] "RemoveContainer" containerID="72c5b9066b49b4064aa5fce9d4fc4c8a56ca48bc6679405d1fe771c39f2212e4" Dec 08 21:50:50 crc kubenswrapper[4791]: E1208 21:50:50.598772 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"manager\" with CrashLoopBackOff: \"back-off 40s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 21:51:04 crc kubenswrapper[4791]: I1208 21:51:04.599415 4791 scope.go:117] "RemoveContainer" containerID="72c5b9066b49b4064aa5fce9d4fc4c8a56ca48bc6679405d1fe771c39f2212e4" Dec 08 21:51:05 crc kubenswrapper[4791]: I1208 21:51:05.662765 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" event={"ID":"bcd8d669-4a40-401d-af99-651b840fb48b","Type":"ContainerStarted","Data":"7b318b89be0687f407870f3d93b240e21965bed6c9bfb1ba171a714d9ae74618"} Dec 08 21:51:05 crc kubenswrapper[4791]: I1208 21:51:05.663495 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 21:51:09 crc kubenswrapper[4791]: I1208 21:51:09.538050 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-2v52t"] Dec 08 21:51:09 crc kubenswrapper[4791]: I1208 21:51:09.548464 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2v52t" Dec 08 21:51:09 crc kubenswrapper[4791]: I1208 21:51:09.558582 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d016b3df-c60d-458b-9f7f-9d395a90523a-catalog-content\") pod \"redhat-marketplace-2v52t\" (UID: \"d016b3df-c60d-458b-9f7f-9d395a90523a\") " pod="openshift-marketplace/redhat-marketplace-2v52t" Dec 08 21:51:09 crc kubenswrapper[4791]: I1208 21:51:09.558832 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jrhh9\" (UniqueName: \"kubernetes.io/projected/d016b3df-c60d-458b-9f7f-9d395a90523a-kube-api-access-jrhh9\") pod \"redhat-marketplace-2v52t\" (UID: \"d016b3df-c60d-458b-9f7f-9d395a90523a\") " pod="openshift-marketplace/redhat-marketplace-2v52t" Dec 08 21:51:09 crc kubenswrapper[4791]: I1208 21:51:09.558898 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d016b3df-c60d-458b-9f7f-9d395a90523a-utilities\") pod \"redhat-marketplace-2v52t\" (UID: \"d016b3df-c60d-458b-9f7f-9d395a90523a\") " pod="openshift-marketplace/redhat-marketplace-2v52t" Dec 08 21:51:09 crc kubenswrapper[4791]: I1208 21:51:09.637847 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2v52t"] Dec 08 21:51:09 crc kubenswrapper[4791]: I1208 21:51:09.660534 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jrhh9\" (UniqueName: \"kubernetes.io/projected/d016b3df-c60d-458b-9f7f-9d395a90523a-kube-api-access-jrhh9\") pod \"redhat-marketplace-2v52t\" (UID: \"d016b3df-c60d-458b-9f7f-9d395a90523a\") " pod="openshift-marketplace/redhat-marketplace-2v52t" Dec 08 21:51:09 crc kubenswrapper[4791]: I1208 21:51:09.660616 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d016b3df-c60d-458b-9f7f-9d395a90523a-utilities\") pod \"redhat-marketplace-2v52t\" (UID: 
\"d016b3df-c60d-458b-9f7f-9d395a90523a\") " pod="openshift-marketplace/redhat-marketplace-2v52t" Dec 08 21:51:09 crc kubenswrapper[4791]: I1208 21:51:09.660894 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d016b3df-c60d-458b-9f7f-9d395a90523a-catalog-content\") pod \"redhat-marketplace-2v52t\" (UID: \"d016b3df-c60d-458b-9f7f-9d395a90523a\") " pod="openshift-marketplace/redhat-marketplace-2v52t" Dec 08 21:51:09 crc kubenswrapper[4791]: I1208 21:51:09.661247 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d016b3df-c60d-458b-9f7f-9d395a90523a-utilities\") pod \"redhat-marketplace-2v52t\" (UID: \"d016b3df-c60d-458b-9f7f-9d395a90523a\") " pod="openshift-marketplace/redhat-marketplace-2v52t" Dec 08 21:51:09 crc kubenswrapper[4791]: I1208 21:51:09.661569 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d016b3df-c60d-458b-9f7f-9d395a90523a-catalog-content\") pod \"redhat-marketplace-2v52t\" (UID: \"d016b3df-c60d-458b-9f7f-9d395a90523a\") " pod="openshift-marketplace/redhat-marketplace-2v52t" Dec 08 21:51:09 crc kubenswrapper[4791]: I1208 21:51:09.696314 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jrhh9\" (UniqueName: \"kubernetes.io/projected/d016b3df-c60d-458b-9f7f-9d395a90523a-kube-api-access-jrhh9\") pod \"redhat-marketplace-2v52t\" (UID: \"d016b3df-c60d-458b-9f7f-9d395a90523a\") " pod="openshift-marketplace/redhat-marketplace-2v52t" Dec 08 21:51:09 crc kubenswrapper[4791]: I1208 21:51:09.937010 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2v52t" Dec 08 21:51:10 crc kubenswrapper[4791]: I1208 21:51:10.630679 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2v52t"] Dec 08 21:51:10 crc kubenswrapper[4791]: I1208 21:51:10.844956 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2v52t" event={"ID":"d016b3df-c60d-458b-9f7f-9d395a90523a","Type":"ContainerStarted","Data":"e304ebe36f0f6210ec1637e9a5aeafc38868b665adece337165e105ed0d3c7c6"} Dec 08 21:51:11 crc kubenswrapper[4791]: I1208 21:51:11.857397 4791 generic.go:334] "Generic (PLEG): container finished" podID="d016b3df-c60d-458b-9f7f-9d395a90523a" containerID="cc13cbe0e9cc6cee323c667c8705b090a71ec3976e0b7d14c8c1d0c58fded712" exitCode=0 Dec 08 21:51:11 crc kubenswrapper[4791]: I1208 21:51:11.857490 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2v52t" event={"ID":"d016b3df-c60d-458b-9f7f-9d395a90523a","Type":"ContainerDied","Data":"cc13cbe0e9cc6cee323c667c8705b090a71ec3976e0b7d14c8c1d0c58fded712"} Dec 08 21:51:12 crc kubenswrapper[4791]: I1208 21:51:12.872047 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2v52t" event={"ID":"d016b3df-c60d-458b-9f7f-9d395a90523a","Type":"ContainerStarted","Data":"c8e68e9ce37335cb646aa208a0c9d6b1a654f1c6a405e3fba8b76e5ab1960d04"} Dec 08 21:51:13 crc kubenswrapper[4791]: I1208 21:51:13.885172 4791 generic.go:334] "Generic (PLEG): container finished" podID="d016b3df-c60d-458b-9f7f-9d395a90523a" containerID="c8e68e9ce37335cb646aa208a0c9d6b1a654f1c6a405e3fba8b76e5ab1960d04" exitCode=0 Dec 08 21:51:13 crc kubenswrapper[4791]: I1208 
21:51:13.885367 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2v52t" event={"ID":"d016b3df-c60d-458b-9f7f-9d395a90523a","Type":"ContainerDied","Data":"c8e68e9ce37335cb646aa208a0c9d6b1a654f1c6a405e3fba8b76e5ab1960d04"} Dec 08 21:51:14 crc kubenswrapper[4791]: I1208 21:51:14.903449 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2v52t" event={"ID":"d016b3df-c60d-458b-9f7f-9d395a90523a","Type":"ContainerStarted","Data":"b0edd8879019723f6695ac4b504cfd922a810b4f54399c630b1151c89e6b7920"} Dec 08 21:51:14 crc kubenswrapper[4791]: I1208 21:51:14.943928 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-2v52t" podStartSLOduration=3.525338396 podStartE2EDuration="5.943908475s" podCreationTimestamp="2025-12-08 21:51:09 +0000 UTC" firstStartedPulling="2025-12-08 21:51:11.859901038 +0000 UTC m=+1948.558659383" lastFinishedPulling="2025-12-08 21:51:14.278471107 +0000 UTC m=+1950.977229462" observedRunningTime="2025-12-08 21:51:14.940230403 +0000 UTC m=+1951.638988768" watchObservedRunningTime="2025-12-08 21:51:14.943908475 +0000 UTC m=+1951.642666810" Dec 08 21:51:15 crc kubenswrapper[4791]: I1208 21:51:15.474379 4791 scope.go:117] "RemoveContainer" containerID="0ea6761a76e32dcc035e62e1153dd1391d0bd691a64b982d01b6f0052ff68e97" Dec 08 21:51:15 crc kubenswrapper[4791]: I1208 21:51:15.497854 4791 scope.go:117] "RemoveContainer" containerID="a1fe9235dcf4d1679495f5659b82e903935c2aba259f15c6a38c653ec330afa7" Dec 08 21:51:15 crc kubenswrapper[4791]: I1208 21:51:15.887101 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 21:51:19 crc kubenswrapper[4791]: I1208 21:51:19.937969 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-2v52t" Dec 08 21:51:19 crc kubenswrapper[4791]: I1208 21:51:19.938377 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-2v52t" Dec 08 21:51:20 crc kubenswrapper[4791]: I1208 21:51:20.375645 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-2v52t" Dec 08 21:51:20 crc kubenswrapper[4791]: I1208 21:51:20.505439 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-2v52t" Dec 08 21:51:20 crc kubenswrapper[4791]: I1208 21:51:20.658632 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2v52t"] Dec 08 21:51:22 crc kubenswrapper[4791]: I1208 21:51:22.352425 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-2v52t" podUID="d016b3df-c60d-458b-9f7f-9d395a90523a" containerName="registry-server" containerID="cri-o://b0edd8879019723f6695ac4b504cfd922a810b4f54399c630b1151c89e6b7920" gracePeriod=2 Dec 08 21:51:22 crc kubenswrapper[4791]: I1208 21:51:22.917196 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2v52t" Dec 08 21:51:22 crc kubenswrapper[4791]: I1208 21:51:22.948922 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d016b3df-c60d-458b-9f7f-9d395a90523a-catalog-content\") pod \"d016b3df-c60d-458b-9f7f-9d395a90523a\" (UID: \"d016b3df-c60d-458b-9f7f-9d395a90523a\") " Dec 08 21:51:22 crc kubenswrapper[4791]: I1208 21:51:22.949045 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d016b3df-c60d-458b-9f7f-9d395a90523a-utilities\") pod \"d016b3df-c60d-458b-9f7f-9d395a90523a\" (UID: \"d016b3df-c60d-458b-9f7f-9d395a90523a\") " Dec 08 21:51:22 crc kubenswrapper[4791]: I1208 21:51:22.949282 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jrhh9\" (UniqueName: \"kubernetes.io/projected/d016b3df-c60d-458b-9f7f-9d395a90523a-kube-api-access-jrhh9\") pod \"d016b3df-c60d-458b-9f7f-9d395a90523a\" (UID: \"d016b3df-c60d-458b-9f7f-9d395a90523a\") " Dec 08 21:51:22 crc kubenswrapper[4791]: I1208 21:51:22.950244 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d016b3df-c60d-458b-9f7f-9d395a90523a-utilities" (OuterVolumeSpecName: "utilities") pod "d016b3df-c60d-458b-9f7f-9d395a90523a" (UID: "d016b3df-c60d-458b-9f7f-9d395a90523a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:51:22 crc kubenswrapper[4791]: I1208 21:51:22.962008 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d016b3df-c60d-458b-9f7f-9d395a90523a-kube-api-access-jrhh9" (OuterVolumeSpecName: "kube-api-access-jrhh9") pod "d016b3df-c60d-458b-9f7f-9d395a90523a" (UID: "d016b3df-c60d-458b-9f7f-9d395a90523a"). InnerVolumeSpecName "kube-api-access-jrhh9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:51:22 crc kubenswrapper[4791]: I1208 21:51:22.976035 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d016b3df-c60d-458b-9f7f-9d395a90523a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d016b3df-c60d-458b-9f7f-9d395a90523a" (UID: "d016b3df-c60d-458b-9f7f-9d395a90523a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:51:23 crc kubenswrapper[4791]: I1208 21:51:23.305190 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jrhh9\" (UniqueName: \"kubernetes.io/projected/d016b3df-c60d-458b-9f7f-9d395a90523a-kube-api-access-jrhh9\") on node \"crc\" DevicePath \"\"" Dec 08 21:51:23 crc kubenswrapper[4791]: I1208 21:51:23.305222 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d016b3df-c60d-458b-9f7f-9d395a90523a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 21:51:23 crc kubenswrapper[4791]: I1208 21:51:23.305232 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d016b3df-c60d-458b-9f7f-9d395a90523a-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 21:51:23 crc kubenswrapper[4791]: I1208 21:51:23.367602 4791 generic.go:334] "Generic (PLEG): container finished" podID="d016b3df-c60d-458b-9f7f-9d395a90523a" containerID="b0edd8879019723f6695ac4b504cfd922a810b4f54399c630b1151c89e6b7920" exitCode=0 Dec 08 21:51:23 crc kubenswrapper[4791]: I1208 21:51:23.367659 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2v52t" event={"ID":"d016b3df-c60d-458b-9f7f-9d395a90523a","Type":"ContainerDied","Data":"b0edd8879019723f6695ac4b504cfd922a810b4f54399c630b1151c89e6b7920"} Dec 08 21:51:23 crc kubenswrapper[4791]: I1208 21:51:23.367693 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2v52t" Dec 08 21:51:23 crc kubenswrapper[4791]: I1208 21:51:23.367841 4791 scope.go:117] "RemoveContainer" containerID="b0edd8879019723f6695ac4b504cfd922a810b4f54399c630b1151c89e6b7920" Dec 08 21:51:23 crc kubenswrapper[4791]: I1208 21:51:23.367736 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2v52t" event={"ID":"d016b3df-c60d-458b-9f7f-9d395a90523a","Type":"ContainerDied","Data":"e304ebe36f0f6210ec1637e9a5aeafc38868b665adece337165e105ed0d3c7c6"} Dec 08 21:51:23 crc kubenswrapper[4791]: I1208 21:51:23.395790 4791 scope.go:117] "RemoveContainer" containerID="c8e68e9ce37335cb646aa208a0c9d6b1a654f1c6a405e3fba8b76e5ab1960d04" Dec 08 21:51:23 crc kubenswrapper[4791]: I1208 21:51:23.424391 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2v52t"] Dec 08 21:51:23 crc kubenswrapper[4791]: I1208 21:51:23.429150 4791 scope.go:117] "RemoveContainer" containerID="cc13cbe0e9cc6cee323c667c8705b090a71ec3976e0b7d14c8c1d0c58fded712" Dec 08 21:51:23 crc kubenswrapper[4791]: I1208 21:51:23.446961 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-2v52t"] Dec 08 21:51:23 crc kubenswrapper[4791]: I1208 21:51:23.480084 4791 scope.go:117] "RemoveContainer" containerID="b0edd8879019723f6695ac4b504cfd922a810b4f54399c630b1151c89e6b7920" Dec 08 21:51:23 crc kubenswrapper[4791]: E1208 21:51:23.481210 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b0edd8879019723f6695ac4b504cfd922a810b4f54399c630b1151c89e6b7920\": container with ID starting with b0edd8879019723f6695ac4b504cfd922a810b4f54399c630b1151c89e6b7920 not found: ID does not exist" containerID="b0edd8879019723f6695ac4b504cfd922a810b4f54399c630b1151c89e6b7920" Dec 08 21:51:23 crc kubenswrapper[4791]: I1208 21:51:23.481255 4791 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0edd8879019723f6695ac4b504cfd922a810b4f54399c630b1151c89e6b7920"} err="failed to get container status \"b0edd8879019723f6695ac4b504cfd922a810b4f54399c630b1151c89e6b7920\": rpc error: code = NotFound desc = could not find container \"b0edd8879019723f6695ac4b504cfd922a810b4f54399c630b1151c89e6b7920\": container with ID starting with b0edd8879019723f6695ac4b504cfd922a810b4f54399c630b1151c89e6b7920 not found: ID does not exist" Dec 08 21:51:23 crc kubenswrapper[4791]: I1208 21:51:23.481285 4791 scope.go:117] "RemoveContainer" containerID="c8e68e9ce37335cb646aa208a0c9d6b1a654f1c6a405e3fba8b76e5ab1960d04" Dec 08 21:51:23 crc kubenswrapper[4791]: E1208 21:51:23.481673 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c8e68e9ce37335cb646aa208a0c9d6b1a654f1c6a405e3fba8b76e5ab1960d04\": container with ID starting with c8e68e9ce37335cb646aa208a0c9d6b1a654f1c6a405e3fba8b76e5ab1960d04 not found: ID does not exist" containerID="c8e68e9ce37335cb646aa208a0c9d6b1a654f1c6a405e3fba8b76e5ab1960d04" Dec 08 21:51:23 crc kubenswrapper[4791]: I1208 21:51:23.481737 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8e68e9ce37335cb646aa208a0c9d6b1a654f1c6a405e3fba8b76e5ab1960d04"} err="failed to get container status \"c8e68e9ce37335cb646aa208a0c9d6b1a654f1c6a405e3fba8b76e5ab1960d04\": rpc error: code = NotFound desc = could not find container \"c8e68e9ce37335cb646aa208a0c9d6b1a654f1c6a405e3fba8b76e5ab1960d04\": container with ID starting with c8e68e9ce37335cb646aa208a0c9d6b1a654f1c6a405e3fba8b76e5ab1960d04 not found: ID does not exist" Dec 08 21:51:23 crc kubenswrapper[4791]: I1208 21:51:23.481769 4791 scope.go:117] "RemoveContainer" containerID="cc13cbe0e9cc6cee323c667c8705b090a71ec3976e0b7d14c8c1d0c58fded712" Dec 08 21:51:23 crc kubenswrapper[4791]: E1208 21:51:23.482223 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc13cbe0e9cc6cee323c667c8705b090a71ec3976e0b7d14c8c1d0c58fded712\": container with ID starting with cc13cbe0e9cc6cee323c667c8705b090a71ec3976e0b7d14c8c1d0c58fded712 not found: ID does not exist" containerID="cc13cbe0e9cc6cee323c667c8705b090a71ec3976e0b7d14c8c1d0c58fded712" Dec 08 21:51:23 crc kubenswrapper[4791]: I1208 21:51:23.482297 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc13cbe0e9cc6cee323c667c8705b090a71ec3976e0b7d14c8c1d0c58fded712"} err="failed to get container status \"cc13cbe0e9cc6cee323c667c8705b090a71ec3976e0b7d14c8c1d0c58fded712\": rpc error: code = NotFound desc = could not find container \"cc13cbe0e9cc6cee323c667c8705b090a71ec3976e0b7d14c8c1d0c58fded712\": container with ID starting with cc13cbe0e9cc6cee323c667c8705b090a71ec3976e0b7d14c8c1d0c58fded712 not found: ID does not exist" Dec 08 21:51:23 crc kubenswrapper[4791]: I1208 21:51:23.612034 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d016b3df-c60d-458b-9f7f-9d395a90523a" path="/var/lib/kubelet/pods/d016b3df-c60d-458b-9f7f-9d395a90523a/volumes" Dec 08 21:51:28 crc kubenswrapper[4791]: I1208 21:51:28.164404 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-fhw6v"] Dec 08 21:51:28 crc kubenswrapper[4791]: E1208 21:51:28.165583 4791 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="d016b3df-c60d-458b-9f7f-9d395a90523a" containerName="extract-utilities" Dec 08 21:51:28 crc kubenswrapper[4791]: I1208 21:51:28.165604 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="d016b3df-c60d-458b-9f7f-9d395a90523a" containerName="extract-utilities" Dec 08 21:51:28 crc kubenswrapper[4791]: E1208 21:51:28.165643 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d016b3df-c60d-458b-9f7f-9d395a90523a" containerName="extract-content" Dec 08 21:51:28 crc kubenswrapper[4791]: I1208 21:51:28.165652 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="d016b3df-c60d-458b-9f7f-9d395a90523a" containerName="extract-content" Dec 08 21:51:28 crc kubenswrapper[4791]: E1208 21:51:28.165670 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d016b3df-c60d-458b-9f7f-9d395a90523a" containerName="registry-server" Dec 08 21:51:28 crc kubenswrapper[4791]: I1208 21:51:28.165679 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="d016b3df-c60d-458b-9f7f-9d395a90523a" containerName="registry-server" Dec 08 21:51:28 crc kubenswrapper[4791]: I1208 21:51:28.166058 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="d016b3df-c60d-458b-9f7f-9d395a90523a" containerName="registry-server" Dec 08 21:51:28 crc kubenswrapper[4791]: I1208 21:51:28.168627 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fhw6v" Dec 08 21:51:28 crc kubenswrapper[4791]: I1208 21:51:28.183072 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fhw6v"] Dec 08 21:51:28 crc kubenswrapper[4791]: I1208 21:51:28.298924 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gjdln\" (UniqueName: \"kubernetes.io/projected/d82b996f-b06c-4ac1-970b-03382b5f5462-kube-api-access-gjdln\") pod \"redhat-operators-fhw6v\" (UID: \"d82b996f-b06c-4ac1-970b-03382b5f5462\") " pod="openshift-marketplace/redhat-operators-fhw6v" Dec 08 21:51:28 crc kubenswrapper[4791]: I1208 21:51:28.299012 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d82b996f-b06c-4ac1-970b-03382b5f5462-catalog-content\") pod \"redhat-operators-fhw6v\" (UID: \"d82b996f-b06c-4ac1-970b-03382b5f5462\") " pod="openshift-marketplace/redhat-operators-fhw6v" Dec 08 21:51:28 crc kubenswrapper[4791]: I1208 21:51:28.299261 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d82b996f-b06c-4ac1-970b-03382b5f5462-utilities\") pod \"redhat-operators-fhw6v\" (UID: \"d82b996f-b06c-4ac1-970b-03382b5f5462\") " pod="openshift-marketplace/redhat-operators-fhw6v" Dec 08 21:51:28 crc kubenswrapper[4791]: I1208 21:51:28.401603 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gjdln\" (UniqueName: \"kubernetes.io/projected/d82b996f-b06c-4ac1-970b-03382b5f5462-kube-api-access-gjdln\") pod \"redhat-operators-fhw6v\" (UID: \"d82b996f-b06c-4ac1-970b-03382b5f5462\") " pod="openshift-marketplace/redhat-operators-fhw6v" Dec 08 21:51:28 crc kubenswrapper[4791]: I1208 21:51:28.401681 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d82b996f-b06c-4ac1-970b-03382b5f5462-catalog-content\") pod \"redhat-operators-fhw6v\" 
(UID: \"d82b996f-b06c-4ac1-970b-03382b5f5462\") " pod="openshift-marketplace/redhat-operators-fhw6v" Dec 08 21:51:28 crc kubenswrapper[4791]: I1208 21:51:28.401738 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d82b996f-b06c-4ac1-970b-03382b5f5462-utilities\") pod \"redhat-operators-fhw6v\" (UID: \"d82b996f-b06c-4ac1-970b-03382b5f5462\") " pod="openshift-marketplace/redhat-operators-fhw6v" Dec 08 21:51:28 crc kubenswrapper[4791]: I1208 21:51:28.402334 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d82b996f-b06c-4ac1-970b-03382b5f5462-utilities\") pod \"redhat-operators-fhw6v\" (UID: \"d82b996f-b06c-4ac1-970b-03382b5f5462\") " pod="openshift-marketplace/redhat-operators-fhw6v" Dec 08 21:51:28 crc kubenswrapper[4791]: I1208 21:51:28.402456 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d82b996f-b06c-4ac1-970b-03382b5f5462-catalog-content\") pod \"redhat-operators-fhw6v\" (UID: \"d82b996f-b06c-4ac1-970b-03382b5f5462\") " pod="openshift-marketplace/redhat-operators-fhw6v" Dec 08 21:51:28 crc kubenswrapper[4791]: I1208 21:51:28.433974 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gjdln\" (UniqueName: \"kubernetes.io/projected/d82b996f-b06c-4ac1-970b-03382b5f5462-kube-api-access-gjdln\") pod \"redhat-operators-fhw6v\" (UID: \"d82b996f-b06c-4ac1-970b-03382b5f5462\") " pod="openshift-marketplace/redhat-operators-fhw6v" Dec 08 21:51:28 crc kubenswrapper[4791]: I1208 21:51:28.493169 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fhw6v" Dec 08 21:51:29 crc kubenswrapper[4791]: I1208 21:51:29.030889 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fhw6v"] Dec 08 21:51:29 crc kubenswrapper[4791]: E1208 21:51:29.464233 4791 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd82b996f_b06c_4ac1_970b_03382b5f5462.slice/crio-8bf4aea93d35f7bf631e2de14ef5ed7d1961d44239fcce102450a07d7977647e.scope\": RecentStats: unable to find data in memory cache]" Dec 08 21:51:29 crc kubenswrapper[4791]: I1208 21:51:29.552568 4791 generic.go:334] "Generic (PLEG): container finished" podID="d82b996f-b06c-4ac1-970b-03382b5f5462" containerID="8bf4aea93d35f7bf631e2de14ef5ed7d1961d44239fcce102450a07d7977647e" exitCode=0 Dec 08 21:51:29 crc kubenswrapper[4791]: I1208 21:51:29.552610 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fhw6v" event={"ID":"d82b996f-b06c-4ac1-970b-03382b5f5462","Type":"ContainerDied","Data":"8bf4aea93d35f7bf631e2de14ef5ed7d1961d44239fcce102450a07d7977647e"} Dec 08 21:51:29 crc kubenswrapper[4791]: I1208 21:51:29.552637 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fhw6v" event={"ID":"d82b996f-b06c-4ac1-970b-03382b5f5462","Type":"ContainerStarted","Data":"c5e5f367d83d0d1340811d48f4cb5037135759d783f4099233b6329c631b667a"} Dec 08 21:51:30 crc kubenswrapper[4791]: I1208 21:51:30.564688 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fhw6v" 
event={"ID":"d82b996f-b06c-4ac1-970b-03382b5f5462","Type":"ContainerStarted","Data":"59a75ec6cd3092c5840316c728483e48bbd10b30ad8e665d02066560f9d2b461"} Dec 08 21:51:35 crc kubenswrapper[4791]: I1208 21:51:35.012372 4791 generic.go:334] "Generic (PLEG): container finished" podID="d82b996f-b06c-4ac1-970b-03382b5f5462" containerID="59a75ec6cd3092c5840316c728483e48bbd10b30ad8e665d02066560f9d2b461" exitCode=0 Dec 08 21:51:35 crc kubenswrapper[4791]: I1208 21:51:35.012439 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fhw6v" event={"ID":"d82b996f-b06c-4ac1-970b-03382b5f5462","Type":"ContainerDied","Data":"59a75ec6cd3092c5840316c728483e48bbd10b30ad8e665d02066560f9d2b461"} Dec 08 21:51:36 crc kubenswrapper[4791]: I1208 21:51:36.189138 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fhw6v" event={"ID":"d82b996f-b06c-4ac1-970b-03382b5f5462","Type":"ContainerStarted","Data":"fd860405c88f4ee1290af54631675c7cf76bce392b00630e4824e9dbb034e827"} Dec 08 21:51:36 crc kubenswrapper[4791]: I1208 21:51:36.211996 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-fhw6v" podStartSLOduration=2.216989619 podStartE2EDuration="8.211974566s" podCreationTimestamp="2025-12-08 21:51:28 +0000 UTC" firstStartedPulling="2025-12-08 21:51:29.554444864 +0000 UTC m=+1966.253203219" lastFinishedPulling="2025-12-08 21:51:35.549429801 +0000 UTC m=+1972.248188166" observedRunningTime="2025-12-08 21:51:36.209099664 +0000 UTC m=+1972.907858029" watchObservedRunningTime="2025-12-08 21:51:36.211974566 +0000 UTC m=+1972.910732911" Dec 08 21:51:38 crc kubenswrapper[4791]: I1208 21:51:38.494378 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-fhw6v" Dec 08 21:51:38 crc kubenswrapper[4791]: I1208 21:51:38.494993 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-fhw6v" Dec 08 21:51:39 crc kubenswrapper[4791]: I1208 21:51:39.543733 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-fhw6v" podUID="d82b996f-b06c-4ac1-970b-03382b5f5462" containerName="registry-server" probeResult="failure" output=< Dec 08 21:51:39 crc kubenswrapper[4791]: timeout: failed to connect service ":50051" within 1s Dec 08 21:51:39 crc kubenswrapper[4791]: > Dec 08 21:51:44 crc kubenswrapper[4791]: I1208 21:51:44.051275 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-c9b8-account-create-update-wpqnb"] Dec 08 21:51:44 crc kubenswrapper[4791]: I1208 21:51:44.063685 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-c9b8-account-create-update-wpqnb"] Dec 08 21:51:45 crc kubenswrapper[4791]: I1208 21:51:45.033870 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-556rp"] Dec 08 21:51:45 crc kubenswrapper[4791]: I1208 21:51:45.046996 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-556rp"] Dec 08 21:51:45 crc kubenswrapper[4791]: I1208 21:51:45.612775 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f9e04e6-b15a-48ff-8836-e325232fdf81" path="/var/lib/kubelet/pods/9f9e04e6-b15a-48ff-8836-e325232fdf81/volumes" Dec 08 21:51:45 crc kubenswrapper[4791]: I1208 21:51:45.613650 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="b8d6e5db-3948-4362-92ec-b2050a5686f0" path="/var/lib/kubelet/pods/b8d6e5db-3948-4362-92ec-b2050a5686f0/volumes" Dec 08 21:51:48 crc kubenswrapper[4791]: I1208 21:51:48.034958 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-f129-account-create-update-6597r"] Dec 08 21:51:48 crc kubenswrapper[4791]: I1208 21:51:48.047797 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-2gfz5"] Dec 08 21:51:48 crc kubenswrapper[4791]: I1208 21:51:48.058110 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-f129-account-create-update-6597r"] Dec 08 21:51:48 crc kubenswrapper[4791]: I1208 21:51:48.068223 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-2gfz5"] Dec 08 21:51:48 crc kubenswrapper[4791]: I1208 21:51:48.557298 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-fhw6v" Dec 08 21:51:48 crc kubenswrapper[4791]: I1208 21:51:48.608786 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-fhw6v" Dec 08 21:51:48 crc kubenswrapper[4791]: I1208 21:51:48.798562 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fhw6v"] Dec 08 21:51:49 crc kubenswrapper[4791]: I1208 21:51:49.046827 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-cd7e-account-create-update-92q4x"] Dec 08 21:51:49 crc kubenswrapper[4791]: I1208 21:51:49.060824 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-x6ghn"] Dec 08 21:51:49 crc kubenswrapper[4791]: I1208 21:51:49.071630 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-x6ghn"] Dec 08 21:51:49 crc kubenswrapper[4791]: I1208 21:51:49.081349 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-cd7e-account-create-update-92q4x"] Dec 08 21:51:49 crc kubenswrapper[4791]: I1208 21:51:49.611314 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0e800e49-dab4-40d0-a626-0e6f7a62ed50" path="/var/lib/kubelet/pods/0e800e49-dab4-40d0-a626-0e6f7a62ed50/volumes" Dec 08 21:51:49 crc kubenswrapper[4791]: I1208 21:51:49.612172 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="291278e1-f793-4fe7-bf0e-63279ac0ba7d" path="/var/lib/kubelet/pods/291278e1-f793-4fe7-bf0e-63279ac0ba7d/volumes" Dec 08 21:51:49 crc kubenswrapper[4791]: I1208 21:51:49.612926 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="492c6838-316b-4f0a-8115-3ba1b4b05ce2" path="/var/lib/kubelet/pods/492c6838-316b-4f0a-8115-3ba1b4b05ce2/volumes" Dec 08 21:51:49 crc kubenswrapper[4791]: I1208 21:51:49.613501 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="51d67e99-957e-4645-9a0b-243cdc7e8369" path="/var/lib/kubelet/pods/51d67e99-957e-4645-9a0b-243cdc7e8369/volumes" Dec 08 21:51:50 crc kubenswrapper[4791]: I1208 21:51:50.340860 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-fhw6v" podUID="d82b996f-b06c-4ac1-970b-03382b5f5462" containerName="registry-server" containerID="cri-o://fd860405c88f4ee1290af54631675c7cf76bce392b00630e4824e9dbb034e827" gracePeriod=2 Dec 08 21:51:50 crc kubenswrapper[4791]: I1208 21:51:50.873834 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-fhw6v" Dec 08 21:51:50 crc kubenswrapper[4791]: I1208 21:51:50.993882 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d82b996f-b06c-4ac1-970b-03382b5f5462-utilities\") pod \"d82b996f-b06c-4ac1-970b-03382b5f5462\" (UID: \"d82b996f-b06c-4ac1-970b-03382b5f5462\") " Dec 08 21:51:50 crc kubenswrapper[4791]: I1208 21:51:50.994117 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gjdln\" (UniqueName: \"kubernetes.io/projected/d82b996f-b06c-4ac1-970b-03382b5f5462-kube-api-access-gjdln\") pod \"d82b996f-b06c-4ac1-970b-03382b5f5462\" (UID: \"d82b996f-b06c-4ac1-970b-03382b5f5462\") " Dec 08 21:51:50 crc kubenswrapper[4791]: I1208 21:51:50.994985 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d82b996f-b06c-4ac1-970b-03382b5f5462-catalog-content\") pod \"d82b996f-b06c-4ac1-970b-03382b5f5462\" (UID: \"d82b996f-b06c-4ac1-970b-03382b5f5462\") " Dec 08 21:51:50 crc kubenswrapper[4791]: I1208 21:51:50.995072 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d82b996f-b06c-4ac1-970b-03382b5f5462-utilities" (OuterVolumeSpecName: "utilities") pod "d82b996f-b06c-4ac1-970b-03382b5f5462" (UID: "d82b996f-b06c-4ac1-970b-03382b5f5462"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:51:50 crc kubenswrapper[4791]: I1208 21:51:50.995482 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d82b996f-b06c-4ac1-970b-03382b5f5462-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 21:51:51 crc kubenswrapper[4791]: I1208 21:51:51.002300 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d82b996f-b06c-4ac1-970b-03382b5f5462-kube-api-access-gjdln" (OuterVolumeSpecName: "kube-api-access-gjdln") pod "d82b996f-b06c-4ac1-970b-03382b5f5462" (UID: "d82b996f-b06c-4ac1-970b-03382b5f5462"). InnerVolumeSpecName "kube-api-access-gjdln". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:51:51 crc kubenswrapper[4791]: I1208 21:51:51.099177 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gjdln\" (UniqueName: \"kubernetes.io/projected/d82b996f-b06c-4ac1-970b-03382b5f5462-kube-api-access-gjdln\") on node \"crc\" DevicePath \"\"" Dec 08 21:51:51 crc kubenswrapper[4791]: I1208 21:51:51.105516 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d82b996f-b06c-4ac1-970b-03382b5f5462-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d82b996f-b06c-4ac1-970b-03382b5f5462" (UID: "d82b996f-b06c-4ac1-970b-03382b5f5462"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:51:51 crc kubenswrapper[4791]: I1208 21:51:51.204419 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d82b996f-b06c-4ac1-970b-03382b5f5462-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 21:51:51 crc kubenswrapper[4791]: I1208 21:51:51.353171 4791 generic.go:334] "Generic (PLEG): container finished" podID="d82b996f-b06c-4ac1-970b-03382b5f5462" containerID="fd860405c88f4ee1290af54631675c7cf76bce392b00630e4824e9dbb034e827" exitCode=0 Dec 08 21:51:51 crc kubenswrapper[4791]: I1208 21:51:51.353220 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fhw6v" event={"ID":"d82b996f-b06c-4ac1-970b-03382b5f5462","Type":"ContainerDied","Data":"fd860405c88f4ee1290af54631675c7cf76bce392b00630e4824e9dbb034e827"} Dec 08 21:51:51 crc kubenswrapper[4791]: I1208 21:51:51.353251 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fhw6v" event={"ID":"d82b996f-b06c-4ac1-970b-03382b5f5462","Type":"ContainerDied","Data":"c5e5f367d83d0d1340811d48f4cb5037135759d783f4099233b6329c631b667a"} Dec 08 21:51:51 crc kubenswrapper[4791]: I1208 21:51:51.353269 4791 scope.go:117] "RemoveContainer" containerID="fd860405c88f4ee1290af54631675c7cf76bce392b00630e4824e9dbb034e827" Dec 08 21:51:51 crc kubenswrapper[4791]: I1208 21:51:51.353435 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fhw6v" Dec 08 21:51:51 crc kubenswrapper[4791]: I1208 21:51:51.393555 4791 scope.go:117] "RemoveContainer" containerID="59a75ec6cd3092c5840316c728483e48bbd10b30ad8e665d02066560f9d2b461" Dec 08 21:51:51 crc kubenswrapper[4791]: I1208 21:51:51.405371 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fhw6v"] Dec 08 21:51:51 crc kubenswrapper[4791]: I1208 21:51:51.418937 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-fhw6v"] Dec 08 21:51:51 crc kubenswrapper[4791]: I1208 21:51:51.430451 4791 scope.go:117] "RemoveContainer" containerID="8bf4aea93d35f7bf631e2de14ef5ed7d1961d44239fcce102450a07d7977647e" Dec 08 21:51:51 crc kubenswrapper[4791]: I1208 21:51:51.489746 4791 scope.go:117] "RemoveContainer" containerID="fd860405c88f4ee1290af54631675c7cf76bce392b00630e4824e9dbb034e827" Dec 08 21:51:51 crc kubenswrapper[4791]: E1208 21:51:51.490404 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fd860405c88f4ee1290af54631675c7cf76bce392b00630e4824e9dbb034e827\": container with ID starting with fd860405c88f4ee1290af54631675c7cf76bce392b00630e4824e9dbb034e827 not found: ID does not exist" containerID="fd860405c88f4ee1290af54631675c7cf76bce392b00630e4824e9dbb034e827" Dec 08 21:51:51 crc kubenswrapper[4791]: I1208 21:51:51.490448 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fd860405c88f4ee1290af54631675c7cf76bce392b00630e4824e9dbb034e827"} err="failed to get container status \"fd860405c88f4ee1290af54631675c7cf76bce392b00630e4824e9dbb034e827\": rpc error: code = NotFound desc = could not find container \"fd860405c88f4ee1290af54631675c7cf76bce392b00630e4824e9dbb034e827\": container with ID starting with fd860405c88f4ee1290af54631675c7cf76bce392b00630e4824e9dbb034e827 not found: ID does not exist" Dec 08 21:51:51 crc 
kubenswrapper[4791]: I1208 21:51:51.490475 4791 scope.go:117] "RemoveContainer" containerID="59a75ec6cd3092c5840316c728483e48bbd10b30ad8e665d02066560f9d2b461" Dec 08 21:51:51 crc kubenswrapper[4791]: E1208 21:51:51.491195 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"59a75ec6cd3092c5840316c728483e48bbd10b30ad8e665d02066560f9d2b461\": container with ID starting with 59a75ec6cd3092c5840316c728483e48bbd10b30ad8e665d02066560f9d2b461 not found: ID does not exist" containerID="59a75ec6cd3092c5840316c728483e48bbd10b30ad8e665d02066560f9d2b461" Dec 08 21:51:51 crc kubenswrapper[4791]: I1208 21:51:51.491231 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59a75ec6cd3092c5840316c728483e48bbd10b30ad8e665d02066560f9d2b461"} err="failed to get container status \"59a75ec6cd3092c5840316c728483e48bbd10b30ad8e665d02066560f9d2b461\": rpc error: code = NotFound desc = could not find container \"59a75ec6cd3092c5840316c728483e48bbd10b30ad8e665d02066560f9d2b461\": container with ID starting with 59a75ec6cd3092c5840316c728483e48bbd10b30ad8e665d02066560f9d2b461 not found: ID does not exist" Dec 08 21:51:51 crc kubenswrapper[4791]: I1208 21:51:51.491255 4791 scope.go:117] "RemoveContainer" containerID="8bf4aea93d35f7bf631e2de14ef5ed7d1961d44239fcce102450a07d7977647e" Dec 08 21:51:51 crc kubenswrapper[4791]: E1208 21:51:51.491696 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8bf4aea93d35f7bf631e2de14ef5ed7d1961d44239fcce102450a07d7977647e\": container with ID starting with 8bf4aea93d35f7bf631e2de14ef5ed7d1961d44239fcce102450a07d7977647e not found: ID does not exist" containerID="8bf4aea93d35f7bf631e2de14ef5ed7d1961d44239fcce102450a07d7977647e" Dec 08 21:51:51 crc kubenswrapper[4791]: I1208 21:51:51.491745 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8bf4aea93d35f7bf631e2de14ef5ed7d1961d44239fcce102450a07d7977647e"} err="failed to get container status \"8bf4aea93d35f7bf631e2de14ef5ed7d1961d44239fcce102450a07d7977647e\": rpc error: code = NotFound desc = could not find container \"8bf4aea93d35f7bf631e2de14ef5ed7d1961d44239fcce102450a07d7977647e\": container with ID starting with 8bf4aea93d35f7bf631e2de14ef5ed7d1961d44239fcce102450a07d7977647e not found: ID does not exist" Dec 08 21:51:51 crc kubenswrapper[4791]: I1208 21:51:51.610953 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d82b996f-b06c-4ac1-970b-03382b5f5462" path="/var/lib/kubelet/pods/d82b996f-b06c-4ac1-970b-03382b5f5462/volumes" Dec 08 21:52:15 crc kubenswrapper[4791]: I1208 21:52:15.557602 4791 scope.go:117] "RemoveContainer" containerID="65065100fdcc2fa117536011ecc1157c1474025a10305b01ac141765c359b34a" Dec 08 21:52:15 crc kubenswrapper[4791]: I1208 21:52:15.592152 4791 scope.go:117] "RemoveContainer" containerID="c1674c2cc7dee6194b90bb7afa8bd4d796d4f1a615273870c9db4db8ef7f2ba1" Dec 08 21:52:15 crc kubenswrapper[4791]: I1208 21:52:15.672303 4791 scope.go:117] "RemoveContainer" containerID="02bf6659e9bf295b0760e012f5c47ed20f72cfb4a8fb0bf7369f5435dce676c5" Dec 08 21:52:15 crc kubenswrapper[4791]: I1208 21:52:15.745207 4791 scope.go:117] "RemoveContainer" containerID="83dde5224ff99f69fb62e6036f3a47833aef4697d449be86110c372cc3ef69a6" Dec 08 21:52:15 crc kubenswrapper[4791]: I1208 21:52:15.804420 4791 scope.go:117] "RemoveContainer" 
containerID="0fa51e101c8817a211840b54d2ccf86e077dcf3fc415d8850fa8f1fcd25615f1" Dec 08 21:52:15 crc kubenswrapper[4791]: I1208 21:52:15.864643 4791 scope.go:117] "RemoveContainer" containerID="41152df6293e432eda2c80f16af4bb9256bcfba8151cd03681546fd8c1f63b09" Dec 08 21:52:26 crc kubenswrapper[4791]: I1208 21:52:26.047168 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-d2jht"] Dec 08 21:52:26 crc kubenswrapper[4791]: I1208 21:52:26.059615 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-d2jht"] Dec 08 21:52:27 crc kubenswrapper[4791]: I1208 21:52:27.033242 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-5ad9-account-create-update-p2wjh"] Dec 08 21:52:27 crc kubenswrapper[4791]: I1208 21:52:27.048404 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-32a5-account-create-update-vpbkq"] Dec 08 21:52:27 crc kubenswrapper[4791]: I1208 21:52:27.059180 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-5ad9-account-create-update-p2wjh"] Dec 08 21:52:27 crc kubenswrapper[4791]: I1208 21:52:27.068889 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-32a5-account-create-update-vpbkq"] Dec 08 21:52:27 crc kubenswrapper[4791]: I1208 21:52:27.612427 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="83b4f5fa-551e-4849-baf6-7afb53700f1d" path="/var/lib/kubelet/pods/83b4f5fa-551e-4849-baf6-7afb53700f1d/volumes" Dec 08 21:52:27 crc kubenswrapper[4791]: I1208 21:52:27.613103 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="944249f8-45ae-4247-b092-54a0a081df4e" path="/var/lib/kubelet/pods/944249f8-45ae-4247-b092-54a0a081df4e/volumes" Dec 08 21:52:27 crc kubenswrapper[4791]: I1208 21:52:27.613651 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b317f179-52ca-4d94-bd3a-c9cfd5096839" path="/var/lib/kubelet/pods/b317f179-52ca-4d94-bd3a-c9cfd5096839/volumes" Dec 08 21:52:30 crc kubenswrapper[4791]: I1208 21:52:30.039146 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-efc3-account-create-update-hxj9q"] Dec 08 21:52:30 crc kubenswrapper[4791]: I1208 21:52:30.050912 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-0f49-account-create-update-bxt2q"] Dec 08 21:52:30 crc kubenswrapper[4791]: I1208 21:52:30.060831 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-db-create-rdkss"] Dec 08 21:52:30 crc kubenswrapper[4791]: I1208 21:52:30.070565 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-9kxd9"] Dec 08 21:52:30 crc kubenswrapper[4791]: I1208 21:52:30.079541 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-db-create-rdkss"] Dec 08 21:52:30 crc kubenswrapper[4791]: I1208 21:52:30.088041 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-efc3-account-create-update-hxj9q"] Dec 08 21:52:30 crc kubenswrapper[4791]: I1208 21:52:30.096543 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-9kxd9"] Dec 08 21:52:30 crc kubenswrapper[4791]: I1208 21:52:30.105608 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-0f49-account-create-update-bxt2q"] Dec 08 21:52:30 crc kubenswrapper[4791]: I1208 21:52:30.115144 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-5rt7m"] Dec 08 21:52:30 crc 
kubenswrapper[4791]: I1208 21:52:30.126655 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-5rt7m"] Dec 08 21:52:31 crc kubenswrapper[4791]: I1208 21:52:31.612560 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="55f1c8c7-116e-4f7f-9b3e-b94c44b5a755" path="/var/lib/kubelet/pods/55f1c8c7-116e-4f7f-9b3e-b94c44b5a755/volumes" Dec 08 21:52:31 crc kubenswrapper[4791]: I1208 21:52:31.614003 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="759af777-0707-490e-87e1-6f15b83fbfa0" path="/var/lib/kubelet/pods/759af777-0707-490e-87e1-6f15b83fbfa0/volumes" Dec 08 21:52:31 crc kubenswrapper[4791]: I1208 21:52:31.615404 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a1085ae8-9862-42a2-9c52-561c82c2e966" path="/var/lib/kubelet/pods/a1085ae8-9862-42a2-9c52-561c82c2e966/volumes" Dec 08 21:52:31 crc kubenswrapper[4791]: I1208 21:52:31.616371 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ceb03ddf-9eca-4760-82da-ef871c8f2af7" path="/var/lib/kubelet/pods/ceb03ddf-9eca-4760-82da-ef871c8f2af7/volumes" Dec 08 21:52:31 crc kubenswrapper[4791]: I1208 21:52:31.617220 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e35cda62-b9bc-4055-b831-2f8beb709d69" path="/var/lib/kubelet/pods/e35cda62-b9bc-4055-b831-2f8beb709d69/volumes" Dec 08 21:52:35 crc kubenswrapper[4791]: I1208 21:52:35.049736 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-xkfgv"] Dec 08 21:52:35 crc kubenswrapper[4791]: I1208 21:52:35.060523 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-xkfgv"] Dec 08 21:52:35 crc kubenswrapper[4791]: I1208 21:52:35.611028 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5e36e4f-4023-4a5a-9ae3-34cde7af452d" path="/var/lib/kubelet/pods/f5e36e4f-4023-4a5a-9ae3-34cde7af452d/volumes" Dec 08 21:52:37 crc kubenswrapper[4791]: I1208 21:52:37.042743 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-rrtnb"] Dec 08 21:52:37 crc kubenswrapper[4791]: I1208 21:52:37.056226 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-rrtnb"] Dec 08 21:52:37 crc kubenswrapper[4791]: I1208 21:52:37.613876 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e74124c9-f94a-4168-a9b1-dafbcb9e0f70" path="/var/lib/kubelet/pods/e74124c9-f94a-4168-a9b1-dafbcb9e0f70/volumes" Dec 08 21:53:05 crc kubenswrapper[4791]: I1208 21:53:05.251331 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:53:05 crc kubenswrapper[4791]: I1208 21:53:05.251873 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:53:10 crc kubenswrapper[4791]: I1208 21:53:10.044978 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-779pn"] Dec 08 21:53:10 crc kubenswrapper[4791]: I1208 21:53:10.058733 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/neutron-db-sync-779pn"] Dec 08 21:53:11 crc kubenswrapper[4791]: I1208 21:53:11.613248 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd2c18de-3f41-4c59-b400-d96f39d28ec2" path="/var/lib/kubelet/pods/bd2c18de-3f41-4c59-b400-d96f39d28ec2/volumes" Dec 08 21:53:16 crc kubenswrapper[4791]: I1208 21:53:16.113099 4791 scope.go:117] "RemoveContainer" containerID="6113d19cb941d8a3159780009ba1b41b0740472cbf8e1e7276afac941cd5e104" Dec 08 21:53:16 crc kubenswrapper[4791]: I1208 21:53:16.136548 4791 scope.go:117] "RemoveContainer" containerID="0e8bd64f021b879cf2a2348f728c3648af903cf50353439466651a66f3406f8a" Dec 08 21:53:16 crc kubenswrapper[4791]: I1208 21:53:16.208311 4791 scope.go:117] "RemoveContainer" containerID="08feded04ccfa47f6af9d1157fda3fe4526d6b561c00654fcb81f240a08b959e" Dec 08 21:53:16 crc kubenswrapper[4791]: I1208 21:53:16.243773 4791 scope.go:117] "RemoveContainer" containerID="ae5fc73a6cfbfb0c28199a16ecc2d5338e32e1b0a02ce8a4d11e115f5935b3b7" Dec 08 21:53:16 crc kubenswrapper[4791]: I1208 21:53:16.433865 4791 scope.go:117] "RemoveContainer" containerID="edc00757880c8ddcd5ac505ae94b8ebe2c5f7cef390d22e63761fc535b22b679" Dec 08 21:53:16 crc kubenswrapper[4791]: I1208 21:53:16.481585 4791 scope.go:117] "RemoveContainer" containerID="fb861b6e2cc7f2c3e139c059bc20dfd39ee6ec7bfb8586207b3f23753a0d3e4e" Dec 08 21:53:16 crc kubenswrapper[4791]: I1208 21:53:16.528819 4791 scope.go:117] "RemoveContainer" containerID="f152d0c1aae53adec81b09eb831ae39d9dfc33aa763d64eb2b80a69b9cdddb0f" Dec 08 21:53:16 crc kubenswrapper[4791]: I1208 21:53:16.560680 4791 scope.go:117] "RemoveContainer" containerID="1c4479a48997ee5b056ce27a91b76ae350f75cca7e51b077b140e8c4bc508256" Dec 08 21:53:16 crc kubenswrapper[4791]: I1208 21:53:16.585024 4791 scope.go:117] "RemoveContainer" containerID="2e262a6f07eeebbe312adce5e28ff3bc470a5b43f6a4283da0d885b98006b53b" Dec 08 21:53:16 crc kubenswrapper[4791]: I1208 21:53:16.618016 4791 scope.go:117] "RemoveContainer" containerID="f5c5924f8877ffda7f15eca0220f57bb6107401463c671e23f93d215ee3416e6" Dec 08 21:53:16 crc kubenswrapper[4791]: I1208 21:53:16.640913 4791 scope.go:117] "RemoveContainer" containerID="11491c3efe1deaddedc84ca292aa8e162a34a9c2d29b24af5512a91cc2638753" Dec 08 21:53:17 crc kubenswrapper[4791]: I1208 21:53:17.043290 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-bt6fn"] Dec 08 21:53:17 crc kubenswrapper[4791]: I1208 21:53:17.078368 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-hdkvl"] Dec 08 21:53:17 crc kubenswrapper[4791]: I1208 21:53:17.089539 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-bt6fn"] Dec 08 21:53:17 crc kubenswrapper[4791]: I1208 21:53:17.099830 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-hdkvl"] Dec 08 21:53:17 crc kubenswrapper[4791]: I1208 21:53:17.612385 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="242da563-f632-4ba3-be9e-bd7d0376120d" path="/var/lib/kubelet/pods/242da563-f632-4ba3-be9e-bd7d0376120d/volumes" Dec 08 21:53:17 crc kubenswrapper[4791]: I1208 21:53:17.613070 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a6d980c9-5283-4755-9cb5-5d86ed36edcf" path="/var/lib/kubelet/pods/a6d980c9-5283-4755-9cb5-5d86ed36edcf/volumes" Dec 08 21:53:19 crc kubenswrapper[4791]: I1208 21:53:19.032614 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-db-sync-kfqgj"] 
Dec 08 21:53:19 crc kubenswrapper[4791]: I1208 21:53:19.045828 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-db-sync-kfqgj"] Dec 08 21:53:19 crc kubenswrapper[4791]: I1208 21:53:19.613217 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e" path="/var/lib/kubelet/pods/4e82ae59-c73a-44bc-ac07-2c1dc3da2b0e/volumes" Dec 08 21:53:21 crc kubenswrapper[4791]: I1208 21:53:21.799526 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-h8gkq"] Dec 08 21:53:21 crc kubenswrapper[4791]: E1208 21:53:21.800540 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d82b996f-b06c-4ac1-970b-03382b5f5462" containerName="extract-utilities" Dec 08 21:53:21 crc kubenswrapper[4791]: I1208 21:53:21.800560 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="d82b996f-b06c-4ac1-970b-03382b5f5462" containerName="extract-utilities" Dec 08 21:53:21 crc kubenswrapper[4791]: E1208 21:53:21.800615 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d82b996f-b06c-4ac1-970b-03382b5f5462" containerName="registry-server" Dec 08 21:53:21 crc kubenswrapper[4791]: I1208 21:53:21.800621 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="d82b996f-b06c-4ac1-970b-03382b5f5462" containerName="registry-server" Dec 08 21:53:21 crc kubenswrapper[4791]: E1208 21:53:21.800638 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d82b996f-b06c-4ac1-970b-03382b5f5462" containerName="extract-content" Dec 08 21:53:21 crc kubenswrapper[4791]: I1208 21:53:21.800644 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="d82b996f-b06c-4ac1-970b-03382b5f5462" containerName="extract-content" Dec 08 21:53:21 crc kubenswrapper[4791]: I1208 21:53:21.800871 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="d82b996f-b06c-4ac1-970b-03382b5f5462" containerName="registry-server" Dec 08 21:53:21 crc kubenswrapper[4791]: I1208 21:53:21.802443 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-h8gkq" Dec 08 21:53:21 crc kubenswrapper[4791]: I1208 21:53:21.815138 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-h8gkq"] Dec 08 21:53:21 crc kubenswrapper[4791]: I1208 21:53:21.990286 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vgvwg\" (UniqueName: \"kubernetes.io/projected/e728dd26-8313-483e-b05e-97d9acf9f180-kube-api-access-vgvwg\") pod \"community-operators-h8gkq\" (UID: \"e728dd26-8313-483e-b05e-97d9acf9f180\") " pod="openshift-marketplace/community-operators-h8gkq" Dec 08 21:53:21 crc kubenswrapper[4791]: I1208 21:53:21.990385 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e728dd26-8313-483e-b05e-97d9acf9f180-utilities\") pod \"community-operators-h8gkq\" (UID: \"e728dd26-8313-483e-b05e-97d9acf9f180\") " pod="openshift-marketplace/community-operators-h8gkq" Dec 08 21:53:21 crc kubenswrapper[4791]: I1208 21:53:21.990551 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e728dd26-8313-483e-b05e-97d9acf9f180-catalog-content\") pod \"community-operators-h8gkq\" (UID: \"e728dd26-8313-483e-b05e-97d9acf9f180\") " pod="openshift-marketplace/community-operators-h8gkq" Dec 08 21:53:22 crc kubenswrapper[4791]: I1208 21:53:22.092854 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vgvwg\" (UniqueName: \"kubernetes.io/projected/e728dd26-8313-483e-b05e-97d9acf9f180-kube-api-access-vgvwg\") pod \"community-operators-h8gkq\" (UID: \"e728dd26-8313-483e-b05e-97d9acf9f180\") " pod="openshift-marketplace/community-operators-h8gkq" Dec 08 21:53:22 crc kubenswrapper[4791]: I1208 21:53:22.092985 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e728dd26-8313-483e-b05e-97d9acf9f180-utilities\") pod \"community-operators-h8gkq\" (UID: \"e728dd26-8313-483e-b05e-97d9acf9f180\") " pod="openshift-marketplace/community-operators-h8gkq" Dec 08 21:53:22 crc kubenswrapper[4791]: I1208 21:53:22.093131 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e728dd26-8313-483e-b05e-97d9acf9f180-catalog-content\") pod \"community-operators-h8gkq\" (UID: \"e728dd26-8313-483e-b05e-97d9acf9f180\") " pod="openshift-marketplace/community-operators-h8gkq" Dec 08 21:53:22 crc kubenswrapper[4791]: I1208 21:53:22.093557 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e728dd26-8313-483e-b05e-97d9acf9f180-utilities\") pod \"community-operators-h8gkq\" (UID: \"e728dd26-8313-483e-b05e-97d9acf9f180\") " pod="openshift-marketplace/community-operators-h8gkq" Dec 08 21:53:22 crc kubenswrapper[4791]: I1208 21:53:22.093674 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e728dd26-8313-483e-b05e-97d9acf9f180-catalog-content\") pod \"community-operators-h8gkq\" (UID: \"e728dd26-8313-483e-b05e-97d9acf9f180\") " pod="openshift-marketplace/community-operators-h8gkq" Dec 08 21:53:22 crc kubenswrapper[4791]: I1208 21:53:22.120971 4791 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-vgvwg\" (UniqueName: \"kubernetes.io/projected/e728dd26-8313-483e-b05e-97d9acf9f180-kube-api-access-vgvwg\") pod \"community-operators-h8gkq\" (UID: \"e728dd26-8313-483e-b05e-97d9acf9f180\") " pod="openshift-marketplace/community-operators-h8gkq" Dec 08 21:53:22 crc kubenswrapper[4791]: I1208 21:53:22.157264 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-h8gkq" Dec 08 21:53:22 crc kubenswrapper[4791]: I1208 21:53:22.673157 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-h8gkq"] Dec 08 21:53:23 crc kubenswrapper[4791]: I1208 21:53:23.398560 4791 generic.go:334] "Generic (PLEG): container finished" podID="e728dd26-8313-483e-b05e-97d9acf9f180" containerID="1f6a1576371e961364e8ac71e25c03b2ea431048d4281588838aecc6732042d0" exitCode=0 Dec 08 21:53:23 crc kubenswrapper[4791]: I1208 21:53:23.398740 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h8gkq" event={"ID":"e728dd26-8313-483e-b05e-97d9acf9f180","Type":"ContainerDied","Data":"1f6a1576371e961364e8ac71e25c03b2ea431048d4281588838aecc6732042d0"} Dec 08 21:53:23 crc kubenswrapper[4791]: I1208 21:53:23.399046 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h8gkq" event={"ID":"e728dd26-8313-483e-b05e-97d9acf9f180","Type":"ContainerStarted","Data":"763f6678e5dcc3c26f7cb4b3e177e8dd751265f683b3dd19645b1f3c9eca6201"} Dec 08 21:53:23 crc kubenswrapper[4791]: I1208 21:53:23.402076 4791 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 08 21:53:24 crc kubenswrapper[4791]: I1208 21:53:24.409763 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h8gkq" event={"ID":"e728dd26-8313-483e-b05e-97d9acf9f180","Type":"ContainerStarted","Data":"34bb845866bc6b94965a488c0cf118063cb7ee4fe4ddc7ca9a3ab8af9b990710"} Dec 08 21:53:26 crc kubenswrapper[4791]: I1208 21:53:26.433679 4791 generic.go:334] "Generic (PLEG): container finished" podID="e728dd26-8313-483e-b05e-97d9acf9f180" containerID="34bb845866bc6b94965a488c0cf118063cb7ee4fe4ddc7ca9a3ab8af9b990710" exitCode=0 Dec 08 21:53:26 crc kubenswrapper[4791]: I1208 21:53:26.433801 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h8gkq" event={"ID":"e728dd26-8313-483e-b05e-97d9acf9f180","Type":"ContainerDied","Data":"34bb845866bc6b94965a488c0cf118063cb7ee4fe4ddc7ca9a3ab8af9b990710"} Dec 08 21:53:27 crc kubenswrapper[4791]: I1208 21:53:27.445790 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h8gkq" event={"ID":"e728dd26-8313-483e-b05e-97d9acf9f180","Type":"ContainerStarted","Data":"ccbc790348d35c7e2e1507aa847cf56359fcb4f920ddfdf5b738a032ecdd681f"} Dec 08 21:53:27 crc kubenswrapper[4791]: I1208 21:53:27.468017 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-h8gkq" podStartSLOduration=3.018568606 podStartE2EDuration="6.467996326s" podCreationTimestamp="2025-12-08 21:53:21 +0000 UTC" firstStartedPulling="2025-12-08 21:53:23.4017691 +0000 UTC m=+2080.100527445" lastFinishedPulling="2025-12-08 21:53:26.85119682 +0000 UTC m=+2083.549955165" observedRunningTime="2025-12-08 21:53:27.466126939 +0000 UTC m=+2084.164885304" watchObservedRunningTime="2025-12-08 
21:53:27.467996326 +0000 UTC m=+2084.166754671" Dec 08 21:53:29 crc kubenswrapper[4791]: I1208 21:53:29.036848 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-7srv6"] Dec 08 21:53:29 crc kubenswrapper[4791]: I1208 21:53:29.050820 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-7srv6"] Dec 08 21:53:29 crc kubenswrapper[4791]: I1208 21:53:29.621171 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9f87323-3041-444f-b26d-c76871bd426f" path="/var/lib/kubelet/pods/d9f87323-3041-444f-b26d-c76871bd426f/volumes" Dec 08 21:53:31 crc kubenswrapper[4791]: I1208 21:53:31.045632 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-5vvss"] Dec 08 21:53:31 crc kubenswrapper[4791]: I1208 21:53:31.058465 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-5vvss"] Dec 08 21:53:31 crc kubenswrapper[4791]: I1208 21:53:31.610678 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee824c0f-2eaa-4eee-8dcf-f487d9445012" path="/var/lib/kubelet/pods/ee824c0f-2eaa-4eee-8dcf-f487d9445012/volumes" Dec 08 21:53:32 crc kubenswrapper[4791]: I1208 21:53:32.157871 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-h8gkq" Dec 08 21:53:32 crc kubenswrapper[4791]: I1208 21:53:32.158156 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-h8gkq" Dec 08 21:53:32 crc kubenswrapper[4791]: I1208 21:53:32.249145 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-h8gkq" Dec 08 21:53:32 crc kubenswrapper[4791]: I1208 21:53:32.544545 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-h8gkq" Dec 08 21:53:32 crc kubenswrapper[4791]: I1208 21:53:32.592816 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-h8gkq"] Dec 08 21:53:34 crc kubenswrapper[4791]: I1208 21:53:34.511194 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-h8gkq" podUID="e728dd26-8313-483e-b05e-97d9acf9f180" containerName="registry-server" containerID="cri-o://ccbc790348d35c7e2e1507aa847cf56359fcb4f920ddfdf5b738a032ecdd681f" gracePeriod=2 Dec 08 21:53:35 crc kubenswrapper[4791]: I1208 21:53:35.027214 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-h8gkq" Dec 08 21:53:35 crc kubenswrapper[4791]: I1208 21:53:35.147901 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vgvwg\" (UniqueName: \"kubernetes.io/projected/e728dd26-8313-483e-b05e-97d9acf9f180-kube-api-access-vgvwg\") pod \"e728dd26-8313-483e-b05e-97d9acf9f180\" (UID: \"e728dd26-8313-483e-b05e-97d9acf9f180\") " Dec 08 21:53:35 crc kubenswrapper[4791]: I1208 21:53:35.148023 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e728dd26-8313-483e-b05e-97d9acf9f180-utilities\") pod \"e728dd26-8313-483e-b05e-97d9acf9f180\" (UID: \"e728dd26-8313-483e-b05e-97d9acf9f180\") " Dec 08 21:53:35 crc kubenswrapper[4791]: I1208 21:53:35.148069 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e728dd26-8313-483e-b05e-97d9acf9f180-catalog-content\") pod \"e728dd26-8313-483e-b05e-97d9acf9f180\" (UID: \"e728dd26-8313-483e-b05e-97d9acf9f180\") " Dec 08 21:53:35 crc kubenswrapper[4791]: I1208 21:53:35.149327 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e728dd26-8313-483e-b05e-97d9acf9f180-utilities" (OuterVolumeSpecName: "utilities") pod "e728dd26-8313-483e-b05e-97d9acf9f180" (UID: "e728dd26-8313-483e-b05e-97d9acf9f180"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:53:35 crc kubenswrapper[4791]: I1208 21:53:35.154632 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e728dd26-8313-483e-b05e-97d9acf9f180-kube-api-access-vgvwg" (OuterVolumeSpecName: "kube-api-access-vgvwg") pod "e728dd26-8313-483e-b05e-97d9acf9f180" (UID: "e728dd26-8313-483e-b05e-97d9acf9f180"). InnerVolumeSpecName "kube-api-access-vgvwg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:53:35 crc kubenswrapper[4791]: I1208 21:53:35.197330 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e728dd26-8313-483e-b05e-97d9acf9f180-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e728dd26-8313-483e-b05e-97d9acf9f180" (UID: "e728dd26-8313-483e-b05e-97d9acf9f180"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:53:35 crc kubenswrapper[4791]: I1208 21:53:35.250575 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vgvwg\" (UniqueName: \"kubernetes.io/projected/e728dd26-8313-483e-b05e-97d9acf9f180-kube-api-access-vgvwg\") on node \"crc\" DevicePath \"\"" Dec 08 21:53:35 crc kubenswrapper[4791]: I1208 21:53:35.250614 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e728dd26-8313-483e-b05e-97d9acf9f180-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 21:53:35 crc kubenswrapper[4791]: I1208 21:53:35.250626 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e728dd26-8313-483e-b05e-97d9acf9f180-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 21:53:35 crc kubenswrapper[4791]: I1208 21:53:35.251141 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:53:35 crc kubenswrapper[4791]: I1208 21:53:35.251214 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:53:35 crc kubenswrapper[4791]: I1208 21:53:35.523283 4791 generic.go:334] "Generic (PLEG): container finished" podID="e728dd26-8313-483e-b05e-97d9acf9f180" containerID="ccbc790348d35c7e2e1507aa847cf56359fcb4f920ddfdf5b738a032ecdd681f" exitCode=0 Dec 08 21:53:35 crc kubenswrapper[4791]: I1208 21:53:35.523334 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h8gkq" event={"ID":"e728dd26-8313-483e-b05e-97d9acf9f180","Type":"ContainerDied","Data":"ccbc790348d35c7e2e1507aa847cf56359fcb4f920ddfdf5b738a032ecdd681f"} Dec 08 21:53:35 crc kubenswrapper[4791]: I1208 21:53:35.523348 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-h8gkq" Dec 08 21:53:35 crc kubenswrapper[4791]: I1208 21:53:35.523372 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h8gkq" event={"ID":"e728dd26-8313-483e-b05e-97d9acf9f180","Type":"ContainerDied","Data":"763f6678e5dcc3c26f7cb4b3e177e8dd751265f683b3dd19645b1f3c9eca6201"} Dec 08 21:53:35 crc kubenswrapper[4791]: I1208 21:53:35.523392 4791 scope.go:117] "RemoveContainer" containerID="ccbc790348d35c7e2e1507aa847cf56359fcb4f920ddfdf5b738a032ecdd681f" Dec 08 21:53:35 crc kubenswrapper[4791]: I1208 21:53:35.548669 4791 scope.go:117] "RemoveContainer" containerID="34bb845866bc6b94965a488c0cf118063cb7ee4fe4ddc7ca9a3ab8af9b990710" Dec 08 21:53:35 crc kubenswrapper[4791]: I1208 21:53:35.562109 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-h8gkq"] Dec 08 21:53:35 crc kubenswrapper[4791]: I1208 21:53:35.582128 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-h8gkq"] Dec 08 21:53:35 crc kubenswrapper[4791]: I1208 21:53:35.593918 4791 scope.go:117] "RemoveContainer" containerID="1f6a1576371e961364e8ac71e25c03b2ea431048d4281588838aecc6732042d0" Dec 08 21:53:35 crc kubenswrapper[4791]: I1208 21:53:35.616312 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e728dd26-8313-483e-b05e-97d9acf9f180" path="/var/lib/kubelet/pods/e728dd26-8313-483e-b05e-97d9acf9f180/volumes" Dec 08 21:53:35 crc kubenswrapper[4791]: I1208 21:53:35.626389 4791 scope.go:117] "RemoveContainer" containerID="ccbc790348d35c7e2e1507aa847cf56359fcb4f920ddfdf5b738a032ecdd681f" Dec 08 21:53:35 crc kubenswrapper[4791]: E1208 21:53:35.627505 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ccbc790348d35c7e2e1507aa847cf56359fcb4f920ddfdf5b738a032ecdd681f\": container with ID starting with ccbc790348d35c7e2e1507aa847cf56359fcb4f920ddfdf5b738a032ecdd681f not found: ID does not exist" containerID="ccbc790348d35c7e2e1507aa847cf56359fcb4f920ddfdf5b738a032ecdd681f" Dec 08 21:53:35 crc kubenswrapper[4791]: I1208 21:53:35.627584 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ccbc790348d35c7e2e1507aa847cf56359fcb4f920ddfdf5b738a032ecdd681f"} err="failed to get container status \"ccbc790348d35c7e2e1507aa847cf56359fcb4f920ddfdf5b738a032ecdd681f\": rpc error: code = NotFound desc = could not find container \"ccbc790348d35c7e2e1507aa847cf56359fcb4f920ddfdf5b738a032ecdd681f\": container with ID starting with ccbc790348d35c7e2e1507aa847cf56359fcb4f920ddfdf5b738a032ecdd681f not found: ID does not exist" Dec 08 21:53:35 crc kubenswrapper[4791]: I1208 21:53:35.627615 4791 scope.go:117] "RemoveContainer" containerID="34bb845866bc6b94965a488c0cf118063cb7ee4fe4ddc7ca9a3ab8af9b990710" Dec 08 21:53:35 crc kubenswrapper[4791]: E1208 21:53:35.628014 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"34bb845866bc6b94965a488c0cf118063cb7ee4fe4ddc7ca9a3ab8af9b990710\": container with ID starting with 34bb845866bc6b94965a488c0cf118063cb7ee4fe4ddc7ca9a3ab8af9b990710 not found: ID does not exist" containerID="34bb845866bc6b94965a488c0cf118063cb7ee4fe4ddc7ca9a3ab8af9b990710" Dec 08 21:53:35 crc kubenswrapper[4791]: I1208 21:53:35.628040 4791 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"34bb845866bc6b94965a488c0cf118063cb7ee4fe4ddc7ca9a3ab8af9b990710"} err="failed to get container status \"34bb845866bc6b94965a488c0cf118063cb7ee4fe4ddc7ca9a3ab8af9b990710\": rpc error: code = NotFound desc = could not find container \"34bb845866bc6b94965a488c0cf118063cb7ee4fe4ddc7ca9a3ab8af9b990710\": container with ID starting with 34bb845866bc6b94965a488c0cf118063cb7ee4fe4ddc7ca9a3ab8af9b990710 not found: ID does not exist" Dec 08 21:53:35 crc kubenswrapper[4791]: I1208 21:53:35.628056 4791 scope.go:117] "RemoveContainer" containerID="1f6a1576371e961364e8ac71e25c03b2ea431048d4281588838aecc6732042d0" Dec 08 21:53:35 crc kubenswrapper[4791]: E1208 21:53:35.628315 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f6a1576371e961364e8ac71e25c03b2ea431048d4281588838aecc6732042d0\": container with ID starting with 1f6a1576371e961364e8ac71e25c03b2ea431048d4281588838aecc6732042d0 not found: ID does not exist" containerID="1f6a1576371e961364e8ac71e25c03b2ea431048d4281588838aecc6732042d0" Dec 08 21:53:35 crc kubenswrapper[4791]: I1208 21:53:35.628358 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f6a1576371e961364e8ac71e25c03b2ea431048d4281588838aecc6732042d0"} err="failed to get container status \"1f6a1576371e961364e8ac71e25c03b2ea431048d4281588838aecc6732042d0\": rpc error: code = NotFound desc = could not find container \"1f6a1576371e961364e8ac71e25c03b2ea431048d4281588838aecc6732042d0\": container with ID starting with 1f6a1576371e961364e8ac71e25c03b2ea431048d4281588838aecc6732042d0 not found: ID does not exist" Dec 08 21:53:40 crc kubenswrapper[4791]: I1208 21:53:40.582252 4791 generic.go:334] "Generic (PLEG): container finished" podID="bcd8d669-4a40-401d-af99-651b840fb48b" containerID="7b318b89be0687f407870f3d93b240e21965bed6c9bfb1ba171a714d9ae74618" exitCode=1 Dec 08 21:53:40 crc kubenswrapper[4791]: I1208 21:53:40.582329 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" event={"ID":"bcd8d669-4a40-401d-af99-651b840fb48b","Type":"ContainerDied","Data":"7b318b89be0687f407870f3d93b240e21965bed6c9bfb1ba171a714d9ae74618"} Dec 08 21:53:40 crc kubenswrapper[4791]: I1208 21:53:40.583974 4791 scope.go:117] "RemoveContainer" containerID="72c5b9066b49b4064aa5fce9d4fc4c8a56ca48bc6679405d1fe771c39f2212e4" Dec 08 21:53:40 crc kubenswrapper[4791]: I1208 21:53:40.585054 4791 scope.go:117] "RemoveContainer" containerID="7b318b89be0687f407870f3d93b240e21965bed6c9bfb1ba171a714d9ae74618" Dec 08 21:53:40 crc kubenswrapper[4791]: E1208 21:53:40.585554 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 1m20s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 21:53:45 crc kubenswrapper[4791]: I1208 21:53:45.885036 4791 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 21:53:45 crc kubenswrapper[4791]: I1208 21:53:45.885556 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 21:53:45 crc kubenswrapper[4791]: I1208 21:53:45.886467 4791 scope.go:117] "RemoveContainer" containerID="7b318b89be0687f407870f3d93b240e21965bed6c9bfb1ba171a714d9ae74618" Dec 08 21:53:45 crc kubenswrapper[4791]: E1208 21:53:45.886819 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 1m20s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 21:53:57 crc kubenswrapper[4791]: I1208 21:53:57.598286 4791 scope.go:117] "RemoveContainer" containerID="7b318b89be0687f407870f3d93b240e21965bed6c9bfb1ba171a714d9ae74618" Dec 08 21:53:57 crc kubenswrapper[4791]: E1208 21:53:57.599128 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 1m20s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 21:54:00 crc kubenswrapper[4791]: I1208 21:54:00.045299 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-7j742"] Dec 08 21:54:00 crc kubenswrapper[4791]: I1208 21:54:00.059701 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-7j742"] Dec 08 21:54:01 crc kubenswrapper[4791]: I1208 21:54:01.035721 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-ncw6l"] Dec 08 21:54:01 crc kubenswrapper[4791]: I1208 21:54:01.054244 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-868dn"] Dec 08 21:54:01 crc kubenswrapper[4791]: I1208 21:54:01.065938 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-5219-account-create-update-jxhzk"] Dec 08 21:54:01 crc kubenswrapper[4791]: I1208 21:54:01.074572 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-5219-account-create-update-jxhzk"] Dec 08 21:54:01 crc kubenswrapper[4791]: I1208 21:54:01.083622 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-f316-account-create-update-pwkqh"] Dec 08 21:54:01 crc kubenswrapper[4791]: I1208 21:54:01.092172 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-ncw6l"] Dec 08 21:54:01 crc kubenswrapper[4791]: I1208 21:54:01.101843 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-f316-account-create-update-pwkqh"] Dec 08 21:54:01 crc kubenswrapper[4791]: I1208 21:54:01.115377 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-868dn"] Dec 08 21:54:01 crc kubenswrapper[4791]: I1208 21:54:01.126744 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-8be1-account-create-update-k6gx9"] Dec 08 21:54:01 crc kubenswrapper[4791]: I1208 21:54:01.155894 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-8be1-account-create-update-k6gx9"] Dec 08 21:54:01 crc kubenswrapper[4791]: I1208 21:54:01.614472 4791 kubelet_volumes.go:163] 
"Cleaned up orphaned pod volumes dir" podUID="32971ca2-45d0-455e-9bd3-3452c7d044e0" path="/var/lib/kubelet/pods/32971ca2-45d0-455e-9bd3-3452c7d044e0/volumes" Dec 08 21:54:01 crc kubenswrapper[4791]: I1208 21:54:01.618099 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5bab6978-7963-4e17-aa8b-a814764f4393" path="/var/lib/kubelet/pods/5bab6978-7963-4e17-aa8b-a814764f4393/volumes" Dec 08 21:54:01 crc kubenswrapper[4791]: I1208 21:54:01.619035 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ffcf886-dc11-49ff-9ab5-ee93d739852e" path="/var/lib/kubelet/pods/5ffcf886-dc11-49ff-9ab5-ee93d739852e/volumes" Dec 08 21:54:01 crc kubenswrapper[4791]: I1208 21:54:01.619860 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="73358556-e1fc-4ffe-a4ce-5b0c131b5c10" path="/var/lib/kubelet/pods/73358556-e1fc-4ffe-a4ce-5b0c131b5c10/volumes" Dec 08 21:54:01 crc kubenswrapper[4791]: I1208 21:54:01.621587 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9a9c9a16-208d-4c67-bdb5-8300013965ff" path="/var/lib/kubelet/pods/9a9c9a16-208d-4c67-bdb5-8300013965ff/volumes" Dec 08 21:54:01 crc kubenswrapper[4791]: I1208 21:54:01.622598 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc095445-ea14-4648-9198-f86b355ec210" path="/var/lib/kubelet/pods/cc095445-ea14-4648-9198-f86b355ec210/volumes" Dec 08 21:54:05 crc kubenswrapper[4791]: I1208 21:54:05.251187 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:54:05 crc kubenswrapper[4791]: I1208 21:54:05.251514 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:54:05 crc kubenswrapper[4791]: I1208 21:54:05.251568 4791 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" Dec 08 21:54:05 crc kubenswrapper[4791]: I1208 21:54:05.252785 4791 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"cb2daff64a39d72de0b926c6af030bdc8c907c270ddf6c6e8fd0e05a72b32d20"} pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 08 21:54:05 crc kubenswrapper[4791]: I1208 21:54:05.252852 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" containerID="cri-o://cb2daff64a39d72de0b926c6af030bdc8c907c270ddf6c6e8fd0e05a72b32d20" gracePeriod=600 Dec 08 21:54:05 crc kubenswrapper[4791]: I1208 21:54:05.862352 4791 generic.go:334] "Generic (PLEG): container finished" podID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerID="cb2daff64a39d72de0b926c6af030bdc8c907c270ddf6c6e8fd0e05a72b32d20" exitCode=0 Dec 08 21:54:05 crc kubenswrapper[4791]: I1208 21:54:05.862434 4791 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" event={"ID":"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d","Type":"ContainerDied","Data":"cb2daff64a39d72de0b926c6af030bdc8c907c270ddf6c6e8fd0e05a72b32d20"} Dec 08 21:54:05 crc kubenswrapper[4791]: I1208 21:54:05.862942 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" event={"ID":"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d","Type":"ContainerStarted","Data":"1eafb2d795bd7d970812895a056ceb1aa3970c27b9c7609ac09be6747381bc3c"} Dec 08 21:54:05 crc kubenswrapper[4791]: I1208 21:54:05.862969 4791 scope.go:117] "RemoveContainer" containerID="378e3a5a4f2c0073c5af7e3a66a8b9357a6d3b10e58f0a7f472dd39ed5f18e43" Dec 08 21:54:10 crc kubenswrapper[4791]: I1208 21:54:10.598281 4791 scope.go:117] "RemoveContainer" containerID="7b318b89be0687f407870f3d93b240e21965bed6c9bfb1ba171a714d9ae74618" Dec 08 21:54:10 crc kubenswrapper[4791]: E1208 21:54:10.599092 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 1m20s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 21:54:16 crc kubenswrapper[4791]: I1208 21:54:16.878332 4791 scope.go:117] "RemoveContainer" containerID="940d84f1c607e42e27d563201496fe5fee7d39f5ab579471f4695b8ded2bd000" Dec 08 21:54:16 crc kubenswrapper[4791]: I1208 21:54:16.921493 4791 scope.go:117] "RemoveContainer" containerID="c35ab6fa6c43f48deabd6df0a1d6ddb73543c365211d4a5c768078b80f38864f" Dec 08 21:54:16 crc kubenswrapper[4791]: I1208 21:54:16.974210 4791 scope.go:117] "RemoveContainer" containerID="8d29b4dffdccc90ca88f0c76136906a7a9d4109c008fb7bc94d6bffa7d722ef7" Dec 08 21:54:17 crc kubenswrapper[4791]: I1208 21:54:17.038830 4791 scope.go:117] "RemoveContainer" containerID="30b8b2ba637299d4be16caffee728b676448aa3e76ba0c34ecbef84b14405d8f" Dec 08 21:54:17 crc kubenswrapper[4791]: I1208 21:54:17.075758 4791 scope.go:117] "RemoveContainer" containerID="e9e1e087c6ccf387445b0f54b9b52098ea90eb510a2ae283a68d6c9d320b4361" Dec 08 21:54:17 crc kubenswrapper[4791]: I1208 21:54:17.129435 4791 scope.go:117] "RemoveContainer" containerID="907c61b885c94e5a7baf7b609f9e0b5e5c2b64a469c42d80901ad3a1f6464f98" Dec 08 21:54:17 crc kubenswrapper[4791]: I1208 21:54:17.190041 4791 scope.go:117] "RemoveContainer" containerID="b139cd64f0e7ab7c3c66542847ce7e165093eee97f2b23bbde953f003e7f459e" Dec 08 21:54:17 crc kubenswrapper[4791]: I1208 21:54:17.216894 4791 scope.go:117] "RemoveContainer" containerID="1ce86c557a404fcbe7a734119c146d96bbaaf5b5a63dcf1c5fbaab4242179618" Dec 08 21:54:17 crc kubenswrapper[4791]: I1208 21:54:17.242029 4791 scope.go:117] "RemoveContainer" containerID="efbe1692614514a129d83ff55d87a7174a9446605c0dfa7b06ae2a46f9333723" Dec 08 21:54:17 crc kubenswrapper[4791]: I1208 21:54:17.277899 4791 scope.go:117] "RemoveContainer" containerID="3dd8e973eb5c92752716d38c73665cdbcf4339027926db29ac71b2ce5e6c3bb4" Dec 08 21:54:17 crc kubenswrapper[4791]: I1208 21:54:17.302740 4791 scope.go:117] "RemoveContainer" containerID="cbda81db0160321f783877c28abbc1bd4331fd4fa74bf5a71ca6fbe3f4332249" Dec 08 21:54:23 crc kubenswrapper[4791]: I1208 21:54:23.605769 4791 scope.go:117] "RemoveContainer" 
containerID="7b318b89be0687f407870f3d93b240e21965bed6c9bfb1ba171a714d9ae74618" Dec 08 21:54:23 crc kubenswrapper[4791]: E1208 21:54:23.606750 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 1m20s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 21:54:38 crc kubenswrapper[4791]: I1208 21:54:38.598507 4791 scope.go:117] "RemoveContainer" containerID="7b318b89be0687f407870f3d93b240e21965bed6c9bfb1ba171a714d9ae74618" Dec 08 21:54:38 crc kubenswrapper[4791]: E1208 21:54:38.599759 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 1m20s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 21:54:43 crc kubenswrapper[4791]: I1208 21:54:43.049233 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-bm9lh"] Dec 08 21:54:43 crc kubenswrapper[4791]: I1208 21:54:43.063768 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-bm9lh"] Dec 08 21:54:43 crc kubenswrapper[4791]: I1208 21:54:43.614250 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e02fffe8-6208-48f0-ba89-6d54f07f5ae4" path="/var/lib/kubelet/pods/e02fffe8-6208-48f0-ba89-6d54f07f5ae4/volumes" Dec 08 21:54:53 crc kubenswrapper[4791]: I1208 21:54:53.608453 4791 scope.go:117] "RemoveContainer" containerID="7b318b89be0687f407870f3d93b240e21965bed6c9bfb1ba171a714d9ae74618" Dec 08 21:54:53 crc kubenswrapper[4791]: E1208 21:54:53.609594 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 1m20s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 21:55:03 crc kubenswrapper[4791]: I1208 21:55:03.050506 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-6tl54"] Dec 08 21:55:03 crc kubenswrapper[4791]: I1208 21:55:03.064840 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-6tl54"] Dec 08 21:55:03 crc kubenswrapper[4791]: I1208 21:55:03.612913 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5bdb996-544f-412b-8279-09b9e0bc2510" path="/var/lib/kubelet/pods/f5bdb996-544f-412b-8279-09b9e0bc2510/volumes" Dec 08 21:55:08 crc kubenswrapper[4791]: I1208 21:55:08.597954 4791 scope.go:117] "RemoveContainer" containerID="7b318b89be0687f407870f3d93b240e21965bed6c9bfb1ba171a714d9ae74618" Dec 08 21:55:09 crc kubenswrapper[4791]: I1208 21:55:09.030678 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-wkrbl"] Dec 08 21:55:09 crc kubenswrapper[4791]: I1208 21:55:09.040989 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/nova-cell1-conductor-db-sync-wkrbl"] Dec 08 21:55:09 crc kubenswrapper[4791]: I1208 21:55:09.505452 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" event={"ID":"bcd8d669-4a40-401d-af99-651b840fb48b","Type":"ContainerStarted","Data":"ccc7edee626269a4ac3ac7f405ce41af1eb9e47254400a3bf52e57f2ec17175f"} Dec 08 21:55:09 crc kubenswrapper[4791]: I1208 21:55:09.505753 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 21:55:09 crc kubenswrapper[4791]: I1208 21:55:09.610493 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4d1939f0-2dba-46a3-96ab-bb0fd01e0c40" path="/var/lib/kubelet/pods/4d1939f0-2dba-46a3-96ab-bb0fd01e0c40/volumes" Dec 08 21:55:15 crc kubenswrapper[4791]: I1208 21:55:15.888119 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 21:55:17 crc kubenswrapper[4791]: I1208 21:55:17.540430 4791 scope.go:117] "RemoveContainer" containerID="8e3dbe569a4ad5f2c161b5771769cc3381bf0ca2f72a94a5aee50f86d778ade9" Dec 08 21:55:17 crc kubenswrapper[4791]: I1208 21:55:17.596543 4791 scope.go:117] "RemoveContainer" containerID="34b6a0637268b786e00dbaf8e3334ebb53ccf8ed29eca966d9e836eafc366ba3" Dec 08 21:55:17 crc kubenswrapper[4791]: I1208 21:55:17.644059 4791 scope.go:117] "RemoveContainer" containerID="55fad5012a7e2bb2fe91c5086e8fd40d5b5fddcc9b496c17fdb3a2c863152c60" Dec 08 21:55:47 crc kubenswrapper[4791]: I1208 21:55:47.058281 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-ss2d8"] Dec 08 21:55:47 crc kubenswrapper[4791]: I1208 21:55:47.068970 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-ss2d8"] Dec 08 21:55:47 crc kubenswrapper[4791]: I1208 21:55:47.615939 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8006edc4-8fd5-474b-b98b-a70c34c93f33" path="/var/lib/kubelet/pods/8006edc4-8fd5-474b-b98b-a70c34c93f33/volumes" Dec 08 21:56:05 crc kubenswrapper[4791]: I1208 21:56:05.251510 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:56:05 crc kubenswrapper[4791]: I1208 21:56:05.252036 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:56:17 crc kubenswrapper[4791]: I1208 21:56:17.803936 4791 scope.go:117] "RemoveContainer" containerID="8a5158d9484db47869bc4bb710b7fd477fef7abfb938ba54a5ce3368f10057e2" Dec 08 21:56:35 crc kubenswrapper[4791]: I1208 21:56:35.252262 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:56:35 crc kubenswrapper[4791]: I1208 21:56:35.252941 4791 
prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:57:05 crc kubenswrapper[4791]: I1208 21:57:05.251918 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:57:05 crc kubenswrapper[4791]: I1208 21:57:05.252386 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:57:05 crc kubenswrapper[4791]: I1208 21:57:05.252436 4791 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" Dec 08 21:57:05 crc kubenswrapper[4791]: I1208 21:57:05.253193 4791 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1eafb2d795bd7d970812895a056ceb1aa3970c27b9c7609ac09be6747381bc3c"} pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 08 21:57:05 crc kubenswrapper[4791]: I1208 21:57:05.253241 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" containerID="cri-o://1eafb2d795bd7d970812895a056ceb1aa3970c27b9c7609ac09be6747381bc3c" gracePeriod=600 Dec 08 21:57:05 crc kubenswrapper[4791]: E1208 21:57:05.385504 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 21:57:05 crc kubenswrapper[4791]: I1208 21:57:05.654776 4791 generic.go:334] "Generic (PLEG): container finished" podID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerID="1eafb2d795bd7d970812895a056ceb1aa3970c27b9c7609ac09be6747381bc3c" exitCode=0 Dec 08 21:57:05 crc kubenswrapper[4791]: I1208 21:57:05.654790 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" event={"ID":"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d","Type":"ContainerDied","Data":"1eafb2d795bd7d970812895a056ceb1aa3970c27b9c7609ac09be6747381bc3c"} Dec 08 21:57:05 crc kubenswrapper[4791]: I1208 21:57:05.655150 4791 scope.go:117] "RemoveContainer" containerID="cb2daff64a39d72de0b926c6af030bdc8c907c270ddf6c6e8fd0e05a72b32d20" Dec 08 21:57:05 crc kubenswrapper[4791]: I1208 21:57:05.656065 4791 scope.go:117] "RemoveContainer" containerID="1eafb2d795bd7d970812895a056ceb1aa3970c27b9c7609ac09be6747381bc3c" Dec 08 
21:57:05 crc kubenswrapper[4791]: E1208 21:57:05.656590 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 21:57:18 crc kubenswrapper[4791]: I1208 21:57:18.598603 4791 scope.go:117] "RemoveContainer" containerID="1eafb2d795bd7d970812895a056ceb1aa3970c27b9c7609ac09be6747381bc3c" Dec 08 21:57:18 crc kubenswrapper[4791]: E1208 21:57:18.599414 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 21:57:29 crc kubenswrapper[4791]: I1208 21:57:29.597869 4791 scope.go:117] "RemoveContainer" containerID="1eafb2d795bd7d970812895a056ceb1aa3970c27b9c7609ac09be6747381bc3c" Dec 08 21:57:29 crc kubenswrapper[4791]: E1208 21:57:29.598802 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 21:57:42 crc kubenswrapper[4791]: I1208 21:57:42.597672 4791 scope.go:117] "RemoveContainer" containerID="1eafb2d795bd7d970812895a056ceb1aa3970c27b9c7609ac09be6747381bc3c" Dec 08 21:57:42 crc kubenswrapper[4791]: E1208 21:57:42.598659 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 21:57:48 crc kubenswrapper[4791]: I1208 21:57:48.114443 4791 generic.go:334] "Generic (PLEG): container finished" podID="bcd8d669-4a40-401d-af99-651b840fb48b" containerID="ccc7edee626269a4ac3ac7f405ce41af1eb9e47254400a3bf52e57f2ec17175f" exitCode=1 Dec 08 21:57:48 crc kubenswrapper[4791]: I1208 21:57:48.114530 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" event={"ID":"bcd8d669-4a40-401d-af99-651b840fb48b","Type":"ContainerDied","Data":"ccc7edee626269a4ac3ac7f405ce41af1eb9e47254400a3bf52e57f2ec17175f"} Dec 08 21:57:48 crc kubenswrapper[4791]: I1208 21:57:48.115074 4791 scope.go:117] "RemoveContainer" containerID="7b318b89be0687f407870f3d93b240e21965bed6c9bfb1ba171a714d9ae74618" Dec 08 21:57:48 crc kubenswrapper[4791]: I1208 21:57:48.116011 4791 scope.go:117] "RemoveContainer" containerID="ccc7edee626269a4ac3ac7f405ce41af1eb9e47254400a3bf52e57f2ec17175f" Dec 08 21:57:48 crc kubenswrapper[4791]: E1208 21:57:48.116433 4791 pod_workers.go:1301] 
"Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 21:57:55 crc kubenswrapper[4791]: I1208 21:57:55.885059 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 21:57:55 crc kubenswrapper[4791]: I1208 21:57:55.886404 4791 scope.go:117] "RemoveContainer" containerID="ccc7edee626269a4ac3ac7f405ce41af1eb9e47254400a3bf52e57f2ec17175f" Dec 08 21:57:55 crc kubenswrapper[4791]: E1208 21:57:55.886671 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 21:57:56 crc kubenswrapper[4791]: I1208 21:57:56.598041 4791 scope.go:117] "RemoveContainer" containerID="1eafb2d795bd7d970812895a056ceb1aa3970c27b9c7609ac09be6747381bc3c" Dec 08 21:57:56 crc kubenswrapper[4791]: E1208 21:57:56.598555 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 21:58:05 crc kubenswrapper[4791]: I1208 21:58:05.884748 4791 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 21:58:05 crc kubenswrapper[4791]: I1208 21:58:05.886260 4791 scope.go:117] "RemoveContainer" containerID="ccc7edee626269a4ac3ac7f405ce41af1eb9e47254400a3bf52e57f2ec17175f" Dec 08 21:58:05 crc kubenswrapper[4791]: E1208 21:58:05.886749 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 21:58:10 crc kubenswrapper[4791]: I1208 21:58:10.598481 4791 scope.go:117] "RemoveContainer" containerID="1eafb2d795bd7d970812895a056ceb1aa3970c27b9c7609ac09be6747381bc3c" Dec 08 21:58:10 crc kubenswrapper[4791]: E1208 21:58:10.599343 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 21:58:17 crc 
kubenswrapper[4791]: I1208 21:58:17.647233 4791 scope.go:117] "RemoveContainer" containerID="ccc7edee626269a4ac3ac7f405ce41af1eb9e47254400a3bf52e57f2ec17175f" Dec 08 21:58:17 crc kubenswrapper[4791]: E1208 21:58:17.651047 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 21:58:22 crc kubenswrapper[4791]: I1208 21:58:22.598335 4791 scope.go:117] "RemoveContainer" containerID="1eafb2d795bd7d970812895a056ceb1aa3970c27b9c7609ac09be6747381bc3c" Dec 08 21:58:22 crc kubenswrapper[4791]: E1208 21:58:22.599044 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 21:58:31 crc kubenswrapper[4791]: I1208 21:58:31.599109 4791 scope.go:117] "RemoveContainer" containerID="ccc7edee626269a4ac3ac7f405ce41af1eb9e47254400a3bf52e57f2ec17175f" Dec 08 21:58:31 crc kubenswrapper[4791]: E1208 21:58:31.600249 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 21:58:35 crc kubenswrapper[4791]: I1208 21:58:35.598292 4791 scope.go:117] "RemoveContainer" containerID="1eafb2d795bd7d970812895a056ceb1aa3970c27b9c7609ac09be6747381bc3c" Dec 08 21:58:35 crc kubenswrapper[4791]: E1208 21:58:35.599573 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 21:58:45 crc kubenswrapper[4791]: I1208 21:58:45.598621 4791 scope.go:117] "RemoveContainer" containerID="ccc7edee626269a4ac3ac7f405ce41af1eb9e47254400a3bf52e57f2ec17175f" Dec 08 21:58:45 crc kubenswrapper[4791]: E1208 21:58:45.599495 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 21:58:47 crc kubenswrapper[4791]: I1208 21:58:47.598486 4791 scope.go:117] "RemoveContainer" containerID="1eafb2d795bd7d970812895a056ceb1aa3970c27b9c7609ac09be6747381bc3c" Dec 08 21:58:47 crc 
kubenswrapper[4791]: E1208 21:58:47.599099 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 21:58:59 crc kubenswrapper[4791]: I1208 21:58:59.598748 4791 scope.go:117] "RemoveContainer" containerID="ccc7edee626269a4ac3ac7f405ce41af1eb9e47254400a3bf52e57f2ec17175f" Dec 08 21:58:59 crc kubenswrapper[4791]: E1208 21:58:59.599612 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 21:59:02 crc kubenswrapper[4791]: I1208 21:59:02.597449 4791 scope.go:117] "RemoveContainer" containerID="1eafb2d795bd7d970812895a056ceb1aa3970c27b9c7609ac09be6747381bc3c" Dec 08 21:59:02 crc kubenswrapper[4791]: E1208 21:59:02.598206 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 21:59:13 crc kubenswrapper[4791]: I1208 21:59:13.604788 4791 scope.go:117] "RemoveContainer" containerID="ccc7edee626269a4ac3ac7f405ce41af1eb9e47254400a3bf52e57f2ec17175f" Dec 08 21:59:13 crc kubenswrapper[4791]: E1208 21:59:13.605585 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 21:59:16 crc kubenswrapper[4791]: I1208 21:59:16.599526 4791 scope.go:117] "RemoveContainer" containerID="1eafb2d795bd7d970812895a056ceb1aa3970c27b9c7609ac09be6747381bc3c" Dec 08 21:59:16 crc kubenswrapper[4791]: E1208 21:59:16.600626 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 21:59:26 crc kubenswrapper[4791]: I1208 21:59:26.598798 4791 scope.go:117] "RemoveContainer" containerID="ccc7edee626269a4ac3ac7f405ce41af1eb9e47254400a3bf52e57f2ec17175f" Dec 08 21:59:26 crc kubenswrapper[4791]: E1208 21:59:26.599931 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 2m40s 
restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 21:59:30 crc kubenswrapper[4791]: I1208 21:59:30.598567 4791 scope.go:117] "RemoveContainer" containerID="1eafb2d795bd7d970812895a056ceb1aa3970c27b9c7609ac09be6747381bc3c" Dec 08 21:59:30 crc kubenswrapper[4791]: E1208 21:59:30.600180 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 21:59:39 crc kubenswrapper[4791]: I1208 21:59:39.598381 4791 scope.go:117] "RemoveContainer" containerID="ccc7edee626269a4ac3ac7f405ce41af1eb9e47254400a3bf52e57f2ec17175f" Dec 08 21:59:39 crc kubenswrapper[4791]: E1208 21:59:39.599512 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 21:59:43 crc kubenswrapper[4791]: I1208 21:59:43.606349 4791 scope.go:117] "RemoveContainer" containerID="1eafb2d795bd7d970812895a056ceb1aa3970c27b9c7609ac09be6747381bc3c" Dec 08 21:59:43 crc kubenswrapper[4791]: E1208 21:59:43.607155 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 21:59:50 crc kubenswrapper[4791]: I1208 21:59:50.597992 4791 scope.go:117] "RemoveContainer" containerID="ccc7edee626269a4ac3ac7f405ce41af1eb9e47254400a3bf52e57f2ec17175f" Dec 08 21:59:50 crc kubenswrapper[4791]: E1208 21:59:50.598879 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 21:59:56 crc kubenswrapper[4791]: I1208 21:59:56.597576 4791 scope.go:117] "RemoveContainer" containerID="1eafb2d795bd7d970812895a056ceb1aa3970c27b9c7609ac09be6747381bc3c" Dec 08 21:59:56 crc kubenswrapper[4791]: E1208 21:59:56.598430 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:00:00 crc kubenswrapper[4791]: I1208 22:00:00.145292 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420520-pkcq5"] Dec 08 22:00:00 crc kubenswrapper[4791]: E1208 22:00:00.146600 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e728dd26-8313-483e-b05e-97d9acf9f180" containerName="extract-content" Dec 08 22:00:00 crc kubenswrapper[4791]: I1208 22:00:00.146640 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="e728dd26-8313-483e-b05e-97d9acf9f180" containerName="extract-content" Dec 08 22:00:00 crc kubenswrapper[4791]: E1208 22:00:00.146692 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e728dd26-8313-483e-b05e-97d9acf9f180" containerName="extract-utilities" Dec 08 22:00:00 crc kubenswrapper[4791]: I1208 22:00:00.146700 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="e728dd26-8313-483e-b05e-97d9acf9f180" containerName="extract-utilities" Dec 08 22:00:00 crc kubenswrapper[4791]: E1208 22:00:00.146740 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e728dd26-8313-483e-b05e-97d9acf9f180" containerName="registry-server" Dec 08 22:00:00 crc kubenswrapper[4791]: I1208 22:00:00.146750 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="e728dd26-8313-483e-b05e-97d9acf9f180" containerName="registry-server" Dec 08 22:00:00 crc kubenswrapper[4791]: I1208 22:00:00.147019 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="e728dd26-8313-483e-b05e-97d9acf9f180" containerName="registry-server" Dec 08 22:00:00 crc kubenswrapper[4791]: I1208 22:00:00.148149 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420520-pkcq5" Dec 08 22:00:00 crc kubenswrapper[4791]: I1208 22:00:00.151338 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 08 22:00:00 crc kubenswrapper[4791]: I1208 22:00:00.152663 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 08 22:00:00 crc kubenswrapper[4791]: I1208 22:00:00.159486 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420520-pkcq5"] Dec 08 22:00:00 crc kubenswrapper[4791]: I1208 22:00:00.326092 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/423e750a-05a4-43db-9c7b-57717d6e3903-secret-volume\") pod \"collect-profiles-29420520-pkcq5\" (UID: \"423e750a-05a4-43db-9c7b-57717d6e3903\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420520-pkcq5" Dec 08 22:00:00 crc kubenswrapper[4791]: I1208 22:00:00.326409 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vdr2\" (UniqueName: \"kubernetes.io/projected/423e750a-05a4-43db-9c7b-57717d6e3903-kube-api-access-4vdr2\") pod \"collect-profiles-29420520-pkcq5\" (UID: \"423e750a-05a4-43db-9c7b-57717d6e3903\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420520-pkcq5" Dec 08 22:00:00 crc kubenswrapper[4791]: I1208 22:00:00.326688 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/423e750a-05a4-43db-9c7b-57717d6e3903-config-volume\") pod \"collect-profiles-29420520-pkcq5\" (UID: \"423e750a-05a4-43db-9c7b-57717d6e3903\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420520-pkcq5" Dec 08 22:00:00 crc kubenswrapper[4791]: I1208 22:00:00.429014 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/423e750a-05a4-43db-9c7b-57717d6e3903-config-volume\") pod \"collect-profiles-29420520-pkcq5\" (UID: \"423e750a-05a4-43db-9c7b-57717d6e3903\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420520-pkcq5" Dec 08 22:00:00 crc kubenswrapper[4791]: I1208 22:00:00.429144 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/423e750a-05a4-43db-9c7b-57717d6e3903-secret-volume\") pod \"collect-profiles-29420520-pkcq5\" (UID: \"423e750a-05a4-43db-9c7b-57717d6e3903\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420520-pkcq5" Dec 08 22:00:00 crc kubenswrapper[4791]: I1208 22:00:00.429229 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vdr2\" (UniqueName: \"kubernetes.io/projected/423e750a-05a4-43db-9c7b-57717d6e3903-kube-api-access-4vdr2\") pod \"collect-profiles-29420520-pkcq5\" (UID: \"423e750a-05a4-43db-9c7b-57717d6e3903\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420520-pkcq5" Dec 08 22:00:00 crc kubenswrapper[4791]: I1208 22:00:00.430042 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/423e750a-05a4-43db-9c7b-57717d6e3903-config-volume\") pod 
\"collect-profiles-29420520-pkcq5\" (UID: \"423e750a-05a4-43db-9c7b-57717d6e3903\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420520-pkcq5" Dec 08 22:00:00 crc kubenswrapper[4791]: I1208 22:00:00.434916 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/423e750a-05a4-43db-9c7b-57717d6e3903-secret-volume\") pod \"collect-profiles-29420520-pkcq5\" (UID: \"423e750a-05a4-43db-9c7b-57717d6e3903\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420520-pkcq5" Dec 08 22:00:00 crc kubenswrapper[4791]: I1208 22:00:00.445354 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vdr2\" (UniqueName: \"kubernetes.io/projected/423e750a-05a4-43db-9c7b-57717d6e3903-kube-api-access-4vdr2\") pod \"collect-profiles-29420520-pkcq5\" (UID: \"423e750a-05a4-43db-9c7b-57717d6e3903\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420520-pkcq5" Dec 08 22:00:00 crc kubenswrapper[4791]: I1208 22:00:00.545142 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420520-pkcq5" Dec 08 22:00:01 crc kubenswrapper[4791]: I1208 22:00:01.026457 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420520-pkcq5"] Dec 08 22:00:01 crc kubenswrapper[4791]: I1208 22:00:01.535580 4791 generic.go:334] "Generic (PLEG): container finished" podID="423e750a-05a4-43db-9c7b-57717d6e3903" containerID="401bec1e857e0b5aeb9d702ceafd0e5997a0c95232c3ceeeb945c29acfd1db2e" exitCode=0 Dec 08 22:00:01 crc kubenswrapper[4791]: I1208 22:00:01.535630 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29420520-pkcq5" event={"ID":"423e750a-05a4-43db-9c7b-57717d6e3903","Type":"ContainerDied","Data":"401bec1e857e0b5aeb9d702ceafd0e5997a0c95232c3ceeeb945c29acfd1db2e"} Dec 08 22:00:01 crc kubenswrapper[4791]: I1208 22:00:01.535933 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29420520-pkcq5" event={"ID":"423e750a-05a4-43db-9c7b-57717d6e3903","Type":"ContainerStarted","Data":"829acdb17ce8e82e53e6a5373c17cfadf040ef58e9721d0302c3733f6cfcc6df"} Dec 08 22:00:02 crc kubenswrapper[4791]: I1208 22:00:02.598452 4791 scope.go:117] "RemoveContainer" containerID="ccc7edee626269a4ac3ac7f405ce41af1eb9e47254400a3bf52e57f2ec17175f" Dec 08 22:00:02 crc kubenswrapper[4791]: E1208 22:00:02.598866 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:00:02 crc kubenswrapper[4791]: I1208 22:00:02.945787 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420520-pkcq5" Dec 08 22:00:03 crc kubenswrapper[4791]: I1208 22:00:03.094115 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4vdr2\" (UniqueName: \"kubernetes.io/projected/423e750a-05a4-43db-9c7b-57717d6e3903-kube-api-access-4vdr2\") pod \"423e750a-05a4-43db-9c7b-57717d6e3903\" (UID: \"423e750a-05a4-43db-9c7b-57717d6e3903\") " Dec 08 22:00:03 crc kubenswrapper[4791]: I1208 22:00:03.094215 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/423e750a-05a4-43db-9c7b-57717d6e3903-secret-volume\") pod \"423e750a-05a4-43db-9c7b-57717d6e3903\" (UID: \"423e750a-05a4-43db-9c7b-57717d6e3903\") " Dec 08 22:00:03 crc kubenswrapper[4791]: I1208 22:00:03.094320 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/423e750a-05a4-43db-9c7b-57717d6e3903-config-volume\") pod \"423e750a-05a4-43db-9c7b-57717d6e3903\" (UID: \"423e750a-05a4-43db-9c7b-57717d6e3903\") " Dec 08 22:00:03 crc kubenswrapper[4791]: I1208 22:00:03.095284 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/423e750a-05a4-43db-9c7b-57717d6e3903-config-volume" (OuterVolumeSpecName: "config-volume") pod "423e750a-05a4-43db-9c7b-57717d6e3903" (UID: "423e750a-05a4-43db-9c7b-57717d6e3903"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 22:00:03 crc kubenswrapper[4791]: I1208 22:00:03.099686 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/423e750a-05a4-43db-9c7b-57717d6e3903-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "423e750a-05a4-43db-9c7b-57717d6e3903" (UID: "423e750a-05a4-43db-9c7b-57717d6e3903"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 22:00:03 crc kubenswrapper[4791]: I1208 22:00:03.099773 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/423e750a-05a4-43db-9c7b-57717d6e3903-kube-api-access-4vdr2" (OuterVolumeSpecName: "kube-api-access-4vdr2") pod "423e750a-05a4-43db-9c7b-57717d6e3903" (UID: "423e750a-05a4-43db-9c7b-57717d6e3903"). InnerVolumeSpecName "kube-api-access-4vdr2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 22:00:03 crc kubenswrapper[4791]: I1208 22:00:03.196324 4791 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/423e750a-05a4-43db-9c7b-57717d6e3903-config-volume\") on node \"crc\" DevicePath \"\"" Dec 08 22:00:03 crc kubenswrapper[4791]: I1208 22:00:03.196356 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4vdr2\" (UniqueName: \"kubernetes.io/projected/423e750a-05a4-43db-9c7b-57717d6e3903-kube-api-access-4vdr2\") on node \"crc\" DevicePath \"\"" Dec 08 22:00:03 crc kubenswrapper[4791]: I1208 22:00:03.196367 4791 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/423e750a-05a4-43db-9c7b-57717d6e3903-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 08 22:00:03 crc kubenswrapper[4791]: I1208 22:00:03.562560 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29420520-pkcq5" event={"ID":"423e750a-05a4-43db-9c7b-57717d6e3903","Type":"ContainerDied","Data":"829acdb17ce8e82e53e6a5373c17cfadf040ef58e9721d0302c3733f6cfcc6df"} Dec 08 22:00:03 crc kubenswrapper[4791]: I1208 22:00:03.562894 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="829acdb17ce8e82e53e6a5373c17cfadf040ef58e9721d0302c3733f6cfcc6df" Dec 08 22:00:03 crc kubenswrapper[4791]: I1208 22:00:03.562656 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420520-pkcq5" Dec 08 22:00:04 crc kubenswrapper[4791]: I1208 22:00:04.025815 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420475-jq6vt"] Dec 08 22:00:04 crc kubenswrapper[4791]: I1208 22:00:04.041432 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420475-jq6vt"] Dec 08 22:00:05 crc kubenswrapper[4791]: I1208 22:00:05.614346 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="527bb8ce-24f8-4bcf-a100-457e11dac79d" path="/var/lib/kubelet/pods/527bb8ce-24f8-4bcf-a100-457e11dac79d/volumes" Dec 08 22:00:07 crc kubenswrapper[4791]: I1208 22:00:07.598463 4791 scope.go:117] "RemoveContainer" containerID="1eafb2d795bd7d970812895a056ceb1aa3970c27b9c7609ac09be6747381bc3c" Dec 08 22:00:07 crc kubenswrapper[4791]: E1208 22:00:07.600569 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:00:16 crc kubenswrapper[4791]: I1208 22:00:16.598574 4791 scope.go:117] "RemoveContainer" containerID="ccc7edee626269a4ac3ac7f405ce41af1eb9e47254400a3bf52e57f2ec17175f" Dec 08 22:00:16 crc kubenswrapper[4791]: E1208 22:00:16.599765 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" 
pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:00:18 crc kubenswrapper[4791]: I1208 22:00:18.016511 4791 scope.go:117] "RemoveContainer" containerID="49fe55620982a62abc6abcd0a4bbd76055ac3cf31d2627423e8539f2029b7b50" Dec 08 22:00:20 crc kubenswrapper[4791]: I1208 22:00:20.599563 4791 scope.go:117] "RemoveContainer" containerID="1eafb2d795bd7d970812895a056ceb1aa3970c27b9c7609ac09be6747381bc3c" Dec 08 22:00:20 crc kubenswrapper[4791]: E1208 22:00:20.601182 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:00:31 crc kubenswrapper[4791]: I1208 22:00:31.598610 4791 scope.go:117] "RemoveContainer" containerID="ccc7edee626269a4ac3ac7f405ce41af1eb9e47254400a3bf52e57f2ec17175f" Dec 08 22:00:31 crc kubenswrapper[4791]: I1208 22:00:31.836515 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" event={"ID":"bcd8d669-4a40-401d-af99-651b840fb48b","Type":"ContainerStarted","Data":"44fa0a94e5bd598fdbbaee16e5e28873de9d87b1abdb7377c908f08871b158af"} Dec 08 22:00:31 crc kubenswrapper[4791]: I1208 22:00:31.838355 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 22:00:32 crc kubenswrapper[4791]: I1208 22:00:32.598275 4791 scope.go:117] "RemoveContainer" containerID="1eafb2d795bd7d970812895a056ceb1aa3970c27b9c7609ac09be6747381bc3c" Dec 08 22:00:32 crc kubenswrapper[4791]: E1208 22:00:32.598905 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:00:38 crc kubenswrapper[4791]: I1208 22:00:38.371521 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-c6rk6"] Dec 08 22:00:38 crc kubenswrapper[4791]: E1208 22:00:38.373018 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="423e750a-05a4-43db-9c7b-57717d6e3903" containerName="collect-profiles" Dec 08 22:00:38 crc kubenswrapper[4791]: I1208 22:00:38.373057 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="423e750a-05a4-43db-9c7b-57717d6e3903" containerName="collect-profiles" Dec 08 22:00:38 crc kubenswrapper[4791]: I1208 22:00:38.373503 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="423e750a-05a4-43db-9c7b-57717d6e3903" containerName="collect-profiles" Dec 08 22:00:38 crc kubenswrapper[4791]: I1208 22:00:38.377674 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-c6rk6" Dec 08 22:00:38 crc kubenswrapper[4791]: I1208 22:00:38.386605 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-c6rk6"] Dec 08 22:00:38 crc kubenswrapper[4791]: I1208 22:00:38.446442 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce8d3d3b-2b87-4d1b-83b8-000beccecf40-utilities\") pod \"certified-operators-c6rk6\" (UID: \"ce8d3d3b-2b87-4d1b-83b8-000beccecf40\") " pod="openshift-marketplace/certified-operators-c6rk6" Dec 08 22:00:38 crc kubenswrapper[4791]: I1208 22:00:38.446518 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce8d3d3b-2b87-4d1b-83b8-000beccecf40-catalog-content\") pod \"certified-operators-c6rk6\" (UID: \"ce8d3d3b-2b87-4d1b-83b8-000beccecf40\") " pod="openshift-marketplace/certified-operators-c6rk6" Dec 08 22:00:38 crc kubenswrapper[4791]: I1208 22:00:38.447033 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pdhcr\" (UniqueName: \"kubernetes.io/projected/ce8d3d3b-2b87-4d1b-83b8-000beccecf40-kube-api-access-pdhcr\") pod \"certified-operators-c6rk6\" (UID: \"ce8d3d3b-2b87-4d1b-83b8-000beccecf40\") " pod="openshift-marketplace/certified-operators-c6rk6" Dec 08 22:00:38 crc kubenswrapper[4791]: I1208 22:00:38.549241 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pdhcr\" (UniqueName: \"kubernetes.io/projected/ce8d3d3b-2b87-4d1b-83b8-000beccecf40-kube-api-access-pdhcr\") pod \"certified-operators-c6rk6\" (UID: \"ce8d3d3b-2b87-4d1b-83b8-000beccecf40\") " pod="openshift-marketplace/certified-operators-c6rk6" Dec 08 22:00:38 crc kubenswrapper[4791]: I1208 22:00:38.549389 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce8d3d3b-2b87-4d1b-83b8-000beccecf40-utilities\") pod \"certified-operators-c6rk6\" (UID: \"ce8d3d3b-2b87-4d1b-83b8-000beccecf40\") " pod="openshift-marketplace/certified-operators-c6rk6" Dec 08 22:00:38 crc kubenswrapper[4791]: I1208 22:00:38.549447 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce8d3d3b-2b87-4d1b-83b8-000beccecf40-catalog-content\") pod \"certified-operators-c6rk6\" (UID: \"ce8d3d3b-2b87-4d1b-83b8-000beccecf40\") " pod="openshift-marketplace/certified-operators-c6rk6" Dec 08 22:00:38 crc kubenswrapper[4791]: I1208 22:00:38.549915 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce8d3d3b-2b87-4d1b-83b8-000beccecf40-utilities\") pod \"certified-operators-c6rk6\" (UID: \"ce8d3d3b-2b87-4d1b-83b8-000beccecf40\") " pod="openshift-marketplace/certified-operators-c6rk6" Dec 08 22:00:38 crc kubenswrapper[4791]: I1208 22:00:38.549939 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce8d3d3b-2b87-4d1b-83b8-000beccecf40-catalog-content\") pod \"certified-operators-c6rk6\" (UID: \"ce8d3d3b-2b87-4d1b-83b8-000beccecf40\") " pod="openshift-marketplace/certified-operators-c6rk6" Dec 08 22:00:38 crc kubenswrapper[4791]: I1208 22:00:38.569722 4791 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-pdhcr\" (UniqueName: \"kubernetes.io/projected/ce8d3d3b-2b87-4d1b-83b8-000beccecf40-kube-api-access-pdhcr\") pod \"certified-operators-c6rk6\" (UID: \"ce8d3d3b-2b87-4d1b-83b8-000beccecf40\") " pod="openshift-marketplace/certified-operators-c6rk6" Dec 08 22:00:38 crc kubenswrapper[4791]: I1208 22:00:38.709489 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-c6rk6" Dec 08 22:00:39 crc kubenswrapper[4791]: I1208 22:00:39.276518 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-c6rk6"] Dec 08 22:00:39 crc kubenswrapper[4791]: I1208 22:00:39.921798 4791 generic.go:334] "Generic (PLEG): container finished" podID="ce8d3d3b-2b87-4d1b-83b8-000beccecf40" containerID="84814a36c2d01b6e040d0616410f7078dc97d64142dba811e32226e3bf1d3aa3" exitCode=0 Dec 08 22:00:39 crc kubenswrapper[4791]: I1208 22:00:39.921915 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c6rk6" event={"ID":"ce8d3d3b-2b87-4d1b-83b8-000beccecf40","Type":"ContainerDied","Data":"84814a36c2d01b6e040d0616410f7078dc97d64142dba811e32226e3bf1d3aa3"} Dec 08 22:00:39 crc kubenswrapper[4791]: I1208 22:00:39.922227 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c6rk6" event={"ID":"ce8d3d3b-2b87-4d1b-83b8-000beccecf40","Type":"ContainerStarted","Data":"0b4e1df2a17f6332b9b0bae97c962e189d1c7abde592df256cdc4fe23bfb9421"} Dec 08 22:00:39 crc kubenswrapper[4791]: I1208 22:00:39.924043 4791 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 08 22:00:45 crc kubenswrapper[4791]: I1208 22:00:45.887283 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 22:00:46 crc kubenswrapper[4791]: I1208 22:00:46.008617 4791 generic.go:334] "Generic (PLEG): container finished" podID="ce8d3d3b-2b87-4d1b-83b8-000beccecf40" containerID="cc46754d8a218cd3478315ca9c574c4bd8b33fe245e3013504c5a6a226ae4919" exitCode=0 Dec 08 22:00:46 crc kubenswrapper[4791]: I1208 22:00:46.008667 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c6rk6" event={"ID":"ce8d3d3b-2b87-4d1b-83b8-000beccecf40","Type":"ContainerDied","Data":"cc46754d8a218cd3478315ca9c574c4bd8b33fe245e3013504c5a6a226ae4919"} Dec 08 22:00:46 crc kubenswrapper[4791]: I1208 22:00:46.598173 4791 scope.go:117] "RemoveContainer" containerID="1eafb2d795bd7d970812895a056ceb1aa3970c27b9c7609ac09be6747381bc3c" Dec 08 22:00:46 crc kubenswrapper[4791]: E1208 22:00:46.598846 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:00:47 crc kubenswrapper[4791]: I1208 22:00:47.021342 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c6rk6" event={"ID":"ce8d3d3b-2b87-4d1b-83b8-000beccecf40","Type":"ContainerStarted","Data":"7247d167a269054b2d08f270136ef20b990913339920f16a94082cf238205ac3"} Dec 08 22:00:47 crc 
kubenswrapper[4791]: I1208 22:00:47.046785 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-c6rk6" podStartSLOduration=2.490917275 podStartE2EDuration="9.046762483s" podCreationTimestamp="2025-12-08 22:00:38 +0000 UTC" firstStartedPulling="2025-12-08 22:00:39.92383033 +0000 UTC m=+2516.622588675" lastFinishedPulling="2025-12-08 22:00:46.479675538 +0000 UTC m=+2523.178433883" observedRunningTime="2025-12-08 22:00:47.041738857 +0000 UTC m=+2523.740497212" watchObservedRunningTime="2025-12-08 22:00:47.046762483 +0000 UTC m=+2523.745520828" Dec 08 22:00:48 crc kubenswrapper[4791]: I1208 22:00:48.710251 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-c6rk6" Dec 08 22:00:48 crc kubenswrapper[4791]: I1208 22:00:48.710665 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-c6rk6" Dec 08 22:00:48 crc kubenswrapper[4791]: I1208 22:00:48.777940 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-c6rk6" Dec 08 22:00:58 crc kubenswrapper[4791]: I1208 22:00:58.767861 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-c6rk6" Dec 08 22:00:58 crc kubenswrapper[4791]: I1208 22:00:58.848322 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-c6rk6"] Dec 08 22:00:58 crc kubenswrapper[4791]: I1208 22:00:58.888968 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qh8xd"] Dec 08 22:00:58 crc kubenswrapper[4791]: I1208 22:00:58.889272 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-qh8xd" podUID="ac6734ae-0444-4576-aa83-49f70e05ade6" containerName="registry-server" containerID="cri-o://97036992f4080899ac7e8d3659c971363b4860dcd3f6ad119cd9ad0bb02a8c05" gracePeriod=2 Dec 08 22:00:59 crc kubenswrapper[4791]: I1208 22:00:59.148342 4791 generic.go:334] "Generic (PLEG): container finished" podID="ac6734ae-0444-4576-aa83-49f70e05ade6" containerID="97036992f4080899ac7e8d3659c971363b4860dcd3f6ad119cd9ad0bb02a8c05" exitCode=0 Dec 08 22:00:59 crc kubenswrapper[4791]: I1208 22:00:59.148405 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qh8xd" event={"ID":"ac6734ae-0444-4576-aa83-49f70e05ade6","Type":"ContainerDied","Data":"97036992f4080899ac7e8d3659c971363b4860dcd3f6ad119cd9ad0bb02a8c05"} Dec 08 22:00:59 crc kubenswrapper[4791]: I1208 22:00:59.479086 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qh8xd" Dec 08 22:00:59 crc kubenswrapper[4791]: I1208 22:00:59.603594 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hw8ln\" (UniqueName: \"kubernetes.io/projected/ac6734ae-0444-4576-aa83-49f70e05ade6-kube-api-access-hw8ln\") pod \"ac6734ae-0444-4576-aa83-49f70e05ade6\" (UID: \"ac6734ae-0444-4576-aa83-49f70e05ade6\") " Dec 08 22:00:59 crc kubenswrapper[4791]: I1208 22:00:59.603693 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac6734ae-0444-4576-aa83-49f70e05ade6-utilities\") pod \"ac6734ae-0444-4576-aa83-49f70e05ade6\" (UID: \"ac6734ae-0444-4576-aa83-49f70e05ade6\") " Dec 08 22:00:59 crc kubenswrapper[4791]: I1208 22:00:59.604201 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac6734ae-0444-4576-aa83-49f70e05ade6-catalog-content\") pod \"ac6734ae-0444-4576-aa83-49f70e05ade6\" (UID: \"ac6734ae-0444-4576-aa83-49f70e05ade6\") " Dec 08 22:00:59 crc kubenswrapper[4791]: I1208 22:00:59.604463 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ac6734ae-0444-4576-aa83-49f70e05ade6-utilities" (OuterVolumeSpecName: "utilities") pod "ac6734ae-0444-4576-aa83-49f70e05ade6" (UID: "ac6734ae-0444-4576-aa83-49f70e05ade6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:00:59 crc kubenswrapper[4791]: I1208 22:00:59.604818 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac6734ae-0444-4576-aa83-49f70e05ade6-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 22:00:59 crc kubenswrapper[4791]: I1208 22:00:59.612623 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac6734ae-0444-4576-aa83-49f70e05ade6-kube-api-access-hw8ln" (OuterVolumeSpecName: "kube-api-access-hw8ln") pod "ac6734ae-0444-4576-aa83-49f70e05ade6" (UID: "ac6734ae-0444-4576-aa83-49f70e05ade6"). InnerVolumeSpecName "kube-api-access-hw8ln". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 22:00:59 crc kubenswrapper[4791]: I1208 22:00:59.698876 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ac6734ae-0444-4576-aa83-49f70e05ade6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ac6734ae-0444-4576-aa83-49f70e05ade6" (UID: "ac6734ae-0444-4576-aa83-49f70e05ade6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:00:59 crc kubenswrapper[4791]: I1208 22:00:59.707365 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hw8ln\" (UniqueName: \"kubernetes.io/projected/ac6734ae-0444-4576-aa83-49f70e05ade6-kube-api-access-hw8ln\") on node \"crc\" DevicePath \"\"" Dec 08 22:00:59 crc kubenswrapper[4791]: I1208 22:00:59.707396 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac6734ae-0444-4576-aa83-49f70e05ade6-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 22:01:00 crc kubenswrapper[4791]: I1208 22:01:00.163149 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qh8xd" event={"ID":"ac6734ae-0444-4576-aa83-49f70e05ade6","Type":"ContainerDied","Data":"19821e003ed7346e51905f68e99dc9069c26fa0d7c48593366ed0154c9a10bc6"} Dec 08 22:01:00 crc kubenswrapper[4791]: I1208 22:01:00.163216 4791 scope.go:117] "RemoveContainer" containerID="97036992f4080899ac7e8d3659c971363b4860dcd3f6ad119cd9ad0bb02a8c05" Dec 08 22:01:00 crc kubenswrapper[4791]: I1208 22:01:00.163220 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qh8xd" Dec 08 22:01:00 crc kubenswrapper[4791]: I1208 22:01:00.169537 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29420521-54kx5"] Dec 08 22:01:00 crc kubenswrapper[4791]: E1208 22:01:00.170596 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac6734ae-0444-4576-aa83-49f70e05ade6" containerName="extract-utilities" Dec 08 22:01:00 crc kubenswrapper[4791]: I1208 22:01:00.170618 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac6734ae-0444-4576-aa83-49f70e05ade6" containerName="extract-utilities" Dec 08 22:01:00 crc kubenswrapper[4791]: E1208 22:01:00.170660 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac6734ae-0444-4576-aa83-49f70e05ade6" containerName="registry-server" Dec 08 22:01:00 crc kubenswrapper[4791]: I1208 22:01:00.170669 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac6734ae-0444-4576-aa83-49f70e05ade6" containerName="registry-server" Dec 08 22:01:00 crc kubenswrapper[4791]: E1208 22:01:00.170685 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac6734ae-0444-4576-aa83-49f70e05ade6" containerName="extract-content" Dec 08 22:01:00 crc kubenswrapper[4791]: I1208 22:01:00.170693 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac6734ae-0444-4576-aa83-49f70e05ade6" containerName="extract-content" Dec 08 22:01:00 crc kubenswrapper[4791]: I1208 22:01:00.171025 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac6734ae-0444-4576-aa83-49f70e05ade6" containerName="registry-server" Dec 08 22:01:00 crc kubenswrapper[4791]: I1208 22:01:00.172180 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29420521-54kx5" Dec 08 22:01:00 crc kubenswrapper[4791]: I1208 22:01:00.195852 4791 scope.go:117] "RemoveContainer" containerID="f14aab193ae54ba74e48a72f07d288a4a08cc080df364d116512ba6707551fcf" Dec 08 22:01:00 crc kubenswrapper[4791]: I1208 22:01:00.214157 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29420521-54kx5"] Dec 08 22:01:00 crc kubenswrapper[4791]: I1208 22:01:00.247802 4791 scope.go:117] "RemoveContainer" containerID="3f3ddbfd05068b1cf2bde3e51b7070cd36143eac8ebe7d57fa9422944657e24a" Dec 08 22:01:00 crc kubenswrapper[4791]: I1208 22:01:00.297758 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qh8xd"] Dec 08 22:01:00 crc kubenswrapper[4791]: I1208 22:01:00.309084 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-qh8xd"] Dec 08 22:01:00 crc kubenswrapper[4791]: I1208 22:01:00.320260 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d3bd890-27a0-476f-85fb-55fbdb17e6a4-config-data\") pod \"keystone-cron-29420521-54kx5\" (UID: \"0d3bd890-27a0-476f-85fb-55fbdb17e6a4\") " pod="openstack/keystone-cron-29420521-54kx5" Dec 08 22:01:00 crc kubenswrapper[4791]: I1208 22:01:00.320488 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0d3bd890-27a0-476f-85fb-55fbdb17e6a4-fernet-keys\") pod \"keystone-cron-29420521-54kx5\" (UID: \"0d3bd890-27a0-476f-85fb-55fbdb17e6a4\") " pod="openstack/keystone-cron-29420521-54kx5" Dec 08 22:01:00 crc kubenswrapper[4791]: I1208 22:01:00.320698 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d3bd890-27a0-476f-85fb-55fbdb17e6a4-combined-ca-bundle\") pod \"keystone-cron-29420521-54kx5\" (UID: \"0d3bd890-27a0-476f-85fb-55fbdb17e6a4\") " pod="openstack/keystone-cron-29420521-54kx5" Dec 08 22:01:00 crc kubenswrapper[4791]: I1208 22:01:00.320790 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zz55n\" (UniqueName: \"kubernetes.io/projected/0d3bd890-27a0-476f-85fb-55fbdb17e6a4-kube-api-access-zz55n\") pod \"keystone-cron-29420521-54kx5\" (UID: \"0d3bd890-27a0-476f-85fb-55fbdb17e6a4\") " pod="openstack/keystone-cron-29420521-54kx5" Dec 08 22:01:00 crc kubenswrapper[4791]: I1208 22:01:00.423049 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d3bd890-27a0-476f-85fb-55fbdb17e6a4-config-data\") pod \"keystone-cron-29420521-54kx5\" (UID: \"0d3bd890-27a0-476f-85fb-55fbdb17e6a4\") " pod="openstack/keystone-cron-29420521-54kx5" Dec 08 22:01:00 crc kubenswrapper[4791]: I1208 22:01:00.423094 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0d3bd890-27a0-476f-85fb-55fbdb17e6a4-fernet-keys\") pod \"keystone-cron-29420521-54kx5\" (UID: \"0d3bd890-27a0-476f-85fb-55fbdb17e6a4\") " pod="openstack/keystone-cron-29420521-54kx5" Dec 08 22:01:00 crc kubenswrapper[4791]: I1208 22:01:00.423137 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/0d3bd890-27a0-476f-85fb-55fbdb17e6a4-combined-ca-bundle\") pod \"keystone-cron-29420521-54kx5\" (UID: \"0d3bd890-27a0-476f-85fb-55fbdb17e6a4\") " pod="openstack/keystone-cron-29420521-54kx5" Dec 08 22:01:00 crc kubenswrapper[4791]: I1208 22:01:00.423169 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zz55n\" (UniqueName: \"kubernetes.io/projected/0d3bd890-27a0-476f-85fb-55fbdb17e6a4-kube-api-access-zz55n\") pod \"keystone-cron-29420521-54kx5\" (UID: \"0d3bd890-27a0-476f-85fb-55fbdb17e6a4\") " pod="openstack/keystone-cron-29420521-54kx5" Dec 08 22:01:00 crc kubenswrapper[4791]: I1208 22:01:00.429323 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0d3bd890-27a0-476f-85fb-55fbdb17e6a4-fernet-keys\") pod \"keystone-cron-29420521-54kx5\" (UID: \"0d3bd890-27a0-476f-85fb-55fbdb17e6a4\") " pod="openstack/keystone-cron-29420521-54kx5" Dec 08 22:01:00 crc kubenswrapper[4791]: I1208 22:01:00.429589 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d3bd890-27a0-476f-85fb-55fbdb17e6a4-config-data\") pod \"keystone-cron-29420521-54kx5\" (UID: \"0d3bd890-27a0-476f-85fb-55fbdb17e6a4\") " pod="openstack/keystone-cron-29420521-54kx5" Dec 08 22:01:00 crc kubenswrapper[4791]: I1208 22:01:00.429630 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d3bd890-27a0-476f-85fb-55fbdb17e6a4-combined-ca-bundle\") pod \"keystone-cron-29420521-54kx5\" (UID: \"0d3bd890-27a0-476f-85fb-55fbdb17e6a4\") " pod="openstack/keystone-cron-29420521-54kx5" Dec 08 22:01:00 crc kubenswrapper[4791]: I1208 22:01:00.446555 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zz55n\" (UniqueName: \"kubernetes.io/projected/0d3bd890-27a0-476f-85fb-55fbdb17e6a4-kube-api-access-zz55n\") pod \"keystone-cron-29420521-54kx5\" (UID: \"0d3bd890-27a0-476f-85fb-55fbdb17e6a4\") " pod="openstack/keystone-cron-29420521-54kx5" Dec 08 22:01:00 crc kubenswrapper[4791]: I1208 22:01:00.574682 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29420521-54kx5" Dec 08 22:01:01 crc kubenswrapper[4791]: I1208 22:01:01.079088 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29420521-54kx5"] Dec 08 22:01:01 crc kubenswrapper[4791]: I1208 22:01:01.191255 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29420521-54kx5" event={"ID":"0d3bd890-27a0-476f-85fb-55fbdb17e6a4","Type":"ContainerStarted","Data":"e9efd44d270d5946950be0022fcf044d9dca5cf82bed3582aa57c8501173a5af"} Dec 08 22:01:01 crc kubenswrapper[4791]: I1208 22:01:01.598178 4791 scope.go:117] "RemoveContainer" containerID="1eafb2d795bd7d970812895a056ceb1aa3970c27b9c7609ac09be6747381bc3c" Dec 08 22:01:01 crc kubenswrapper[4791]: E1208 22:01:01.598770 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:01:01 crc kubenswrapper[4791]: I1208 22:01:01.614665 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac6734ae-0444-4576-aa83-49f70e05ade6" path="/var/lib/kubelet/pods/ac6734ae-0444-4576-aa83-49f70e05ade6/volumes" Dec 08 22:01:02 crc kubenswrapper[4791]: I1208 22:01:02.205358 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29420521-54kx5" event={"ID":"0d3bd890-27a0-476f-85fb-55fbdb17e6a4","Type":"ContainerStarted","Data":"669f15b688e241636ec3feed92af60e12932860469bfa3683ad018021301060e"} Dec 08 22:01:02 crc kubenswrapper[4791]: I1208 22:01:02.225879 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29420521-54kx5" podStartSLOduration=2.22586145 podStartE2EDuration="2.22586145s" podCreationTimestamp="2025-12-08 22:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 22:01:02.223220313 +0000 UTC m=+2538.921978658" watchObservedRunningTime="2025-12-08 22:01:02.22586145 +0000 UTC m=+2538.924619795" Dec 08 22:01:04 crc kubenswrapper[4791]: I1208 22:01:04.225548 4791 generic.go:334] "Generic (PLEG): container finished" podID="0d3bd890-27a0-476f-85fb-55fbdb17e6a4" containerID="669f15b688e241636ec3feed92af60e12932860469bfa3683ad018021301060e" exitCode=0 Dec 08 22:01:04 crc kubenswrapper[4791]: I1208 22:01:04.225641 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29420521-54kx5" event={"ID":"0d3bd890-27a0-476f-85fb-55fbdb17e6a4","Type":"ContainerDied","Data":"669f15b688e241636ec3feed92af60e12932860469bfa3683ad018021301060e"} Dec 08 22:01:05 crc kubenswrapper[4791]: I1208 22:01:05.635386 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29420521-54kx5" Dec 08 22:01:05 crc kubenswrapper[4791]: I1208 22:01:05.700865 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zz55n\" (UniqueName: \"kubernetes.io/projected/0d3bd890-27a0-476f-85fb-55fbdb17e6a4-kube-api-access-zz55n\") pod \"0d3bd890-27a0-476f-85fb-55fbdb17e6a4\" (UID: \"0d3bd890-27a0-476f-85fb-55fbdb17e6a4\") " Dec 08 22:01:05 crc kubenswrapper[4791]: I1208 22:01:05.701178 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d3bd890-27a0-476f-85fb-55fbdb17e6a4-combined-ca-bundle\") pod \"0d3bd890-27a0-476f-85fb-55fbdb17e6a4\" (UID: \"0d3bd890-27a0-476f-85fb-55fbdb17e6a4\") " Dec 08 22:01:05 crc kubenswrapper[4791]: I1208 22:01:05.701227 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d3bd890-27a0-476f-85fb-55fbdb17e6a4-config-data\") pod \"0d3bd890-27a0-476f-85fb-55fbdb17e6a4\" (UID: \"0d3bd890-27a0-476f-85fb-55fbdb17e6a4\") " Dec 08 22:01:05 crc kubenswrapper[4791]: I1208 22:01:05.701586 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0d3bd890-27a0-476f-85fb-55fbdb17e6a4-fernet-keys\") pod \"0d3bd890-27a0-476f-85fb-55fbdb17e6a4\" (UID: \"0d3bd890-27a0-476f-85fb-55fbdb17e6a4\") " Dec 08 22:01:05 crc kubenswrapper[4791]: I1208 22:01:05.706500 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d3bd890-27a0-476f-85fb-55fbdb17e6a4-kube-api-access-zz55n" (OuterVolumeSpecName: "kube-api-access-zz55n") pod "0d3bd890-27a0-476f-85fb-55fbdb17e6a4" (UID: "0d3bd890-27a0-476f-85fb-55fbdb17e6a4"). InnerVolumeSpecName "kube-api-access-zz55n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 22:01:05 crc kubenswrapper[4791]: I1208 22:01:05.713856 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d3bd890-27a0-476f-85fb-55fbdb17e6a4-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "0d3bd890-27a0-476f-85fb-55fbdb17e6a4" (UID: "0d3bd890-27a0-476f-85fb-55fbdb17e6a4"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 22:01:05 crc kubenswrapper[4791]: I1208 22:01:05.730313 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d3bd890-27a0-476f-85fb-55fbdb17e6a4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0d3bd890-27a0-476f-85fb-55fbdb17e6a4" (UID: "0d3bd890-27a0-476f-85fb-55fbdb17e6a4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 22:01:05 crc kubenswrapper[4791]: I1208 22:01:05.767169 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d3bd890-27a0-476f-85fb-55fbdb17e6a4-config-data" (OuterVolumeSpecName: "config-data") pod "0d3bd890-27a0-476f-85fb-55fbdb17e6a4" (UID: "0d3bd890-27a0-476f-85fb-55fbdb17e6a4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 22:01:05 crc kubenswrapper[4791]: I1208 22:01:05.804553 4791 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d3bd890-27a0-476f-85fb-55fbdb17e6a4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 22:01:05 crc kubenswrapper[4791]: I1208 22:01:05.804582 4791 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d3bd890-27a0-476f-85fb-55fbdb17e6a4-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 22:01:05 crc kubenswrapper[4791]: I1208 22:01:05.804591 4791 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0d3bd890-27a0-476f-85fb-55fbdb17e6a4-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 08 22:01:05 crc kubenswrapper[4791]: I1208 22:01:05.804602 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zz55n\" (UniqueName: \"kubernetes.io/projected/0d3bd890-27a0-476f-85fb-55fbdb17e6a4-kube-api-access-zz55n\") on node \"crc\" DevicePath \"\"" Dec 08 22:01:06 crc kubenswrapper[4791]: I1208 22:01:06.245274 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29420521-54kx5" event={"ID":"0d3bd890-27a0-476f-85fb-55fbdb17e6a4","Type":"ContainerDied","Data":"e9efd44d270d5946950be0022fcf044d9dca5cf82bed3582aa57c8501173a5af"} Dec 08 22:01:06 crc kubenswrapper[4791]: I1208 22:01:06.245316 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e9efd44d270d5946950be0022fcf044d9dca5cf82bed3582aa57c8501173a5af" Dec 08 22:01:06 crc kubenswrapper[4791]: I1208 22:01:06.245740 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29420521-54kx5" Dec 08 22:01:12 crc kubenswrapper[4791]: I1208 22:01:12.598899 4791 scope.go:117] "RemoveContainer" containerID="1eafb2d795bd7d970812895a056ceb1aa3970c27b9c7609ac09be6747381bc3c" Dec 08 22:01:12 crc kubenswrapper[4791]: E1208 22:01:12.599767 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:01:21 crc kubenswrapper[4791]: I1208 22:01:21.955934 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-gn574"] Dec 08 22:01:21 crc kubenswrapper[4791]: E1208 22:01:21.957256 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d3bd890-27a0-476f-85fb-55fbdb17e6a4" containerName="keystone-cron" Dec 08 22:01:21 crc kubenswrapper[4791]: I1208 22:01:21.957277 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d3bd890-27a0-476f-85fb-55fbdb17e6a4" containerName="keystone-cron" Dec 08 22:01:21 crc kubenswrapper[4791]: I1208 22:01:21.957623 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d3bd890-27a0-476f-85fb-55fbdb17e6a4" containerName="keystone-cron" Dec 08 22:01:21 crc kubenswrapper[4791]: I1208 22:01:21.959513 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gn574" Dec 08 22:01:21 crc kubenswrapper[4791]: I1208 22:01:21.974847 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gn574"] Dec 08 22:01:22 crc kubenswrapper[4791]: I1208 22:01:22.060077 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a131a720-c20d-4e35-b2ec-b2bc0540916b-utilities\") pod \"redhat-marketplace-gn574\" (UID: \"a131a720-c20d-4e35-b2ec-b2bc0540916b\") " pod="openshift-marketplace/redhat-marketplace-gn574" Dec 08 22:01:22 crc kubenswrapper[4791]: I1208 22:01:22.060610 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a131a720-c20d-4e35-b2ec-b2bc0540916b-catalog-content\") pod \"redhat-marketplace-gn574\" (UID: \"a131a720-c20d-4e35-b2ec-b2bc0540916b\") " pod="openshift-marketplace/redhat-marketplace-gn574" Dec 08 22:01:22 crc kubenswrapper[4791]: I1208 22:01:22.060801 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qmqrs\" (UniqueName: \"kubernetes.io/projected/a131a720-c20d-4e35-b2ec-b2bc0540916b-kube-api-access-qmqrs\") pod \"redhat-marketplace-gn574\" (UID: \"a131a720-c20d-4e35-b2ec-b2bc0540916b\") " pod="openshift-marketplace/redhat-marketplace-gn574" Dec 08 22:01:22 crc kubenswrapper[4791]: I1208 22:01:22.162473 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a131a720-c20d-4e35-b2ec-b2bc0540916b-utilities\") pod \"redhat-marketplace-gn574\" (UID: \"a131a720-c20d-4e35-b2ec-b2bc0540916b\") " pod="openshift-marketplace/redhat-marketplace-gn574" Dec 08 22:01:22 crc kubenswrapper[4791]: I1208 22:01:22.162678 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a131a720-c20d-4e35-b2ec-b2bc0540916b-catalog-content\") pod \"redhat-marketplace-gn574\" (UID: \"a131a720-c20d-4e35-b2ec-b2bc0540916b\") " pod="openshift-marketplace/redhat-marketplace-gn574" Dec 08 22:01:22 crc kubenswrapper[4791]: I1208 22:01:22.162775 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qmqrs\" (UniqueName: \"kubernetes.io/projected/a131a720-c20d-4e35-b2ec-b2bc0540916b-kube-api-access-qmqrs\") pod \"redhat-marketplace-gn574\" (UID: \"a131a720-c20d-4e35-b2ec-b2bc0540916b\") " pod="openshift-marketplace/redhat-marketplace-gn574" Dec 08 22:01:22 crc kubenswrapper[4791]: I1208 22:01:22.163037 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a131a720-c20d-4e35-b2ec-b2bc0540916b-utilities\") pod \"redhat-marketplace-gn574\" (UID: \"a131a720-c20d-4e35-b2ec-b2bc0540916b\") " pod="openshift-marketplace/redhat-marketplace-gn574" Dec 08 22:01:22 crc kubenswrapper[4791]: I1208 22:01:22.163203 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a131a720-c20d-4e35-b2ec-b2bc0540916b-catalog-content\") pod \"redhat-marketplace-gn574\" (UID: \"a131a720-c20d-4e35-b2ec-b2bc0540916b\") " pod="openshift-marketplace/redhat-marketplace-gn574" Dec 08 22:01:22 crc kubenswrapper[4791]: I1208 22:01:22.189631 4791 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-qmqrs\" (UniqueName: \"kubernetes.io/projected/a131a720-c20d-4e35-b2ec-b2bc0540916b-kube-api-access-qmqrs\") pod \"redhat-marketplace-gn574\" (UID: \"a131a720-c20d-4e35-b2ec-b2bc0540916b\") " pod="openshift-marketplace/redhat-marketplace-gn574" Dec 08 22:01:22 crc kubenswrapper[4791]: I1208 22:01:22.340121 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gn574" Dec 08 22:01:22 crc kubenswrapper[4791]: I1208 22:01:22.841668 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gn574"] Dec 08 22:01:23 crc kubenswrapper[4791]: I1208 22:01:23.405768 4791 generic.go:334] "Generic (PLEG): container finished" podID="a131a720-c20d-4e35-b2ec-b2bc0540916b" containerID="b59302ab6434c8d7d439be9eada3f5c8da9085e7ab5b5b6dc148ebe34639854c" exitCode=0 Dec 08 22:01:23 crc kubenswrapper[4791]: I1208 22:01:23.405866 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gn574" event={"ID":"a131a720-c20d-4e35-b2ec-b2bc0540916b","Type":"ContainerDied","Data":"b59302ab6434c8d7d439be9eada3f5c8da9085e7ab5b5b6dc148ebe34639854c"} Dec 08 22:01:23 crc kubenswrapper[4791]: I1208 22:01:23.406068 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gn574" event={"ID":"a131a720-c20d-4e35-b2ec-b2bc0540916b","Type":"ContainerStarted","Data":"383b3f6112d11e15c7001fb0e5e09511fcff79928c0f67bc04d9cdd9b482f972"} Dec 08 22:01:23 crc kubenswrapper[4791]: I1208 22:01:23.604535 4791 scope.go:117] "RemoveContainer" containerID="1eafb2d795bd7d970812895a056ceb1aa3970c27b9c7609ac09be6747381bc3c" Dec 08 22:01:23 crc kubenswrapper[4791]: E1208 22:01:23.604945 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:01:24 crc kubenswrapper[4791]: I1208 22:01:24.421973 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gn574" event={"ID":"a131a720-c20d-4e35-b2ec-b2bc0540916b","Type":"ContainerStarted","Data":"e599237e65b45137c84cb68274e275eea67dc04cdc0ebdae51351d9d476b5c9c"} Dec 08 22:01:25 crc kubenswrapper[4791]: I1208 22:01:25.434898 4791 generic.go:334] "Generic (PLEG): container finished" podID="a131a720-c20d-4e35-b2ec-b2bc0540916b" containerID="e599237e65b45137c84cb68274e275eea67dc04cdc0ebdae51351d9d476b5c9c" exitCode=0 Dec 08 22:01:25 crc kubenswrapper[4791]: I1208 22:01:25.435005 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gn574" event={"ID":"a131a720-c20d-4e35-b2ec-b2bc0540916b","Type":"ContainerDied","Data":"e599237e65b45137c84cb68274e275eea67dc04cdc0ebdae51351d9d476b5c9c"} Dec 08 22:01:27 crc kubenswrapper[4791]: I1208 22:01:27.469652 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gn574" event={"ID":"a131a720-c20d-4e35-b2ec-b2bc0540916b","Type":"ContainerStarted","Data":"d35d1ed5aaae06a6d630c623eed293b53876d8478316de52add22dfe35cfd634"} Dec 08 22:01:27 crc kubenswrapper[4791]: I1208 22:01:27.499537 4791 pod_startup_latency_tracker.go:104] "Observed 
pod startup duration" pod="openshift-marketplace/redhat-marketplace-gn574" podStartSLOduration=3.8297453150000003 podStartE2EDuration="6.499515316s" podCreationTimestamp="2025-12-08 22:01:21 +0000 UTC" firstStartedPulling="2025-12-08 22:01:23.40813999 +0000 UTC m=+2560.106898335" lastFinishedPulling="2025-12-08 22:01:26.077909991 +0000 UTC m=+2562.776668336" observedRunningTime="2025-12-08 22:01:27.488408288 +0000 UTC m=+2564.187166653" watchObservedRunningTime="2025-12-08 22:01:27.499515316 +0000 UTC m=+2564.198273671" Dec 08 22:01:32 crc kubenswrapper[4791]: I1208 22:01:32.340371 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-gn574" Dec 08 22:01:32 crc kubenswrapper[4791]: I1208 22:01:32.340998 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-gn574" Dec 08 22:01:32 crc kubenswrapper[4791]: I1208 22:01:32.397007 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-gn574" Dec 08 22:01:32 crc kubenswrapper[4791]: I1208 22:01:32.575908 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-gn574" Dec 08 22:01:32 crc kubenswrapper[4791]: I1208 22:01:32.636565 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gn574"] Dec 08 22:01:34 crc kubenswrapper[4791]: I1208 22:01:34.547210 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-gn574" podUID="a131a720-c20d-4e35-b2ec-b2bc0540916b" containerName="registry-server" containerID="cri-o://d35d1ed5aaae06a6d630c623eed293b53876d8478316de52add22dfe35cfd634" gracePeriod=2 Dec 08 22:01:34 crc kubenswrapper[4791]: I1208 22:01:34.598851 4791 scope.go:117] "RemoveContainer" containerID="1eafb2d795bd7d970812895a056ceb1aa3970c27b9c7609ac09be6747381bc3c" Dec 08 22:01:34 crc kubenswrapper[4791]: E1208 22:01:34.599245 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:01:35 crc kubenswrapper[4791]: I1208 22:01:35.557209 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gn574" Dec 08 22:01:35 crc kubenswrapper[4791]: I1208 22:01:35.563277 4791 generic.go:334] "Generic (PLEG): container finished" podID="a131a720-c20d-4e35-b2ec-b2bc0540916b" containerID="d35d1ed5aaae06a6d630c623eed293b53876d8478316de52add22dfe35cfd634" exitCode=0 Dec 08 22:01:35 crc kubenswrapper[4791]: I1208 22:01:35.563321 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gn574" event={"ID":"a131a720-c20d-4e35-b2ec-b2bc0540916b","Type":"ContainerDied","Data":"d35d1ed5aaae06a6d630c623eed293b53876d8478316de52add22dfe35cfd634"} Dec 08 22:01:35 crc kubenswrapper[4791]: I1208 22:01:35.563347 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gn574" event={"ID":"a131a720-c20d-4e35-b2ec-b2bc0540916b","Type":"ContainerDied","Data":"383b3f6112d11e15c7001fb0e5e09511fcff79928c0f67bc04d9cdd9b482f972"} Dec 08 22:01:35 crc kubenswrapper[4791]: I1208 22:01:35.563363 4791 scope.go:117] "RemoveContainer" containerID="d35d1ed5aaae06a6d630c623eed293b53876d8478316de52add22dfe35cfd634" Dec 08 22:01:35 crc kubenswrapper[4791]: I1208 22:01:35.563887 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gn574" Dec 08 22:01:35 crc kubenswrapper[4791]: I1208 22:01:35.589442 4791 scope.go:117] "RemoveContainer" containerID="e599237e65b45137c84cb68274e275eea67dc04cdc0ebdae51351d9d476b5c9c" Dec 08 22:01:35 crc kubenswrapper[4791]: I1208 22:01:35.602039 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a131a720-c20d-4e35-b2ec-b2bc0540916b-catalog-content\") pod \"a131a720-c20d-4e35-b2ec-b2bc0540916b\" (UID: \"a131a720-c20d-4e35-b2ec-b2bc0540916b\") " Dec 08 22:01:35 crc kubenswrapper[4791]: I1208 22:01:35.602149 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qmqrs\" (UniqueName: \"kubernetes.io/projected/a131a720-c20d-4e35-b2ec-b2bc0540916b-kube-api-access-qmqrs\") pod \"a131a720-c20d-4e35-b2ec-b2bc0540916b\" (UID: \"a131a720-c20d-4e35-b2ec-b2bc0540916b\") " Dec 08 22:01:35 crc kubenswrapper[4791]: I1208 22:01:35.602262 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a131a720-c20d-4e35-b2ec-b2bc0540916b-utilities\") pod \"a131a720-c20d-4e35-b2ec-b2bc0540916b\" (UID: \"a131a720-c20d-4e35-b2ec-b2bc0540916b\") " Dec 08 22:01:35 crc kubenswrapper[4791]: I1208 22:01:35.606855 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a131a720-c20d-4e35-b2ec-b2bc0540916b-utilities" (OuterVolumeSpecName: "utilities") pod "a131a720-c20d-4e35-b2ec-b2bc0540916b" (UID: "a131a720-c20d-4e35-b2ec-b2bc0540916b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:01:35 crc kubenswrapper[4791]: I1208 22:01:35.613368 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a131a720-c20d-4e35-b2ec-b2bc0540916b-kube-api-access-qmqrs" (OuterVolumeSpecName: "kube-api-access-qmqrs") pod "a131a720-c20d-4e35-b2ec-b2bc0540916b" (UID: "a131a720-c20d-4e35-b2ec-b2bc0540916b"). InnerVolumeSpecName "kube-api-access-qmqrs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 22:01:35 crc kubenswrapper[4791]: I1208 22:01:35.619796 4791 scope.go:117] "RemoveContainer" containerID="b59302ab6434c8d7d439be9eada3f5c8da9085e7ab5b5b6dc148ebe34639854c" Dec 08 22:01:35 crc kubenswrapper[4791]: I1208 22:01:35.639442 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a131a720-c20d-4e35-b2ec-b2bc0540916b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a131a720-c20d-4e35-b2ec-b2bc0540916b" (UID: "a131a720-c20d-4e35-b2ec-b2bc0540916b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:01:35 crc kubenswrapper[4791]: I1208 22:01:35.704832 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a131a720-c20d-4e35-b2ec-b2bc0540916b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 22:01:35 crc kubenswrapper[4791]: I1208 22:01:35.704913 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qmqrs\" (UniqueName: \"kubernetes.io/projected/a131a720-c20d-4e35-b2ec-b2bc0540916b-kube-api-access-qmqrs\") on node \"crc\" DevicePath \"\"" Dec 08 22:01:35 crc kubenswrapper[4791]: I1208 22:01:35.704941 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a131a720-c20d-4e35-b2ec-b2bc0540916b-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 22:01:35 crc kubenswrapper[4791]: I1208 22:01:35.742450 4791 scope.go:117] "RemoveContainer" containerID="d35d1ed5aaae06a6d630c623eed293b53876d8478316de52add22dfe35cfd634" Dec 08 22:01:35 crc kubenswrapper[4791]: E1208 22:01:35.742995 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d35d1ed5aaae06a6d630c623eed293b53876d8478316de52add22dfe35cfd634\": container with ID starting with d35d1ed5aaae06a6d630c623eed293b53876d8478316de52add22dfe35cfd634 not found: ID does not exist" containerID="d35d1ed5aaae06a6d630c623eed293b53876d8478316de52add22dfe35cfd634" Dec 08 22:01:35 crc kubenswrapper[4791]: I1208 22:01:35.743036 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d35d1ed5aaae06a6d630c623eed293b53876d8478316de52add22dfe35cfd634"} err="failed to get container status \"d35d1ed5aaae06a6d630c623eed293b53876d8478316de52add22dfe35cfd634\": rpc error: code = NotFound desc = could not find container \"d35d1ed5aaae06a6d630c623eed293b53876d8478316de52add22dfe35cfd634\": container with ID starting with d35d1ed5aaae06a6d630c623eed293b53876d8478316de52add22dfe35cfd634 not found: ID does not exist" Dec 08 22:01:35 crc kubenswrapper[4791]: I1208 22:01:35.743057 4791 scope.go:117] "RemoveContainer" containerID="e599237e65b45137c84cb68274e275eea67dc04cdc0ebdae51351d9d476b5c9c" Dec 08 22:01:35 crc kubenswrapper[4791]: E1208 22:01:35.743343 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e599237e65b45137c84cb68274e275eea67dc04cdc0ebdae51351d9d476b5c9c\": container with ID starting with e599237e65b45137c84cb68274e275eea67dc04cdc0ebdae51351d9d476b5c9c not found: ID does not exist" containerID="e599237e65b45137c84cb68274e275eea67dc04cdc0ebdae51351d9d476b5c9c" Dec 08 22:01:35 crc kubenswrapper[4791]: I1208 22:01:35.743377 4791 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"e599237e65b45137c84cb68274e275eea67dc04cdc0ebdae51351d9d476b5c9c"} err="failed to get container status \"e599237e65b45137c84cb68274e275eea67dc04cdc0ebdae51351d9d476b5c9c\": rpc error: code = NotFound desc = could not find container \"e599237e65b45137c84cb68274e275eea67dc04cdc0ebdae51351d9d476b5c9c\": container with ID starting with e599237e65b45137c84cb68274e275eea67dc04cdc0ebdae51351d9d476b5c9c not found: ID does not exist" Dec 08 22:01:35 crc kubenswrapper[4791]: I1208 22:01:35.743391 4791 scope.go:117] "RemoveContainer" containerID="b59302ab6434c8d7d439be9eada3f5c8da9085e7ab5b5b6dc148ebe34639854c" Dec 08 22:01:35 crc kubenswrapper[4791]: E1208 22:01:35.743829 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b59302ab6434c8d7d439be9eada3f5c8da9085e7ab5b5b6dc148ebe34639854c\": container with ID starting with b59302ab6434c8d7d439be9eada3f5c8da9085e7ab5b5b6dc148ebe34639854c not found: ID does not exist" containerID="b59302ab6434c8d7d439be9eada3f5c8da9085e7ab5b5b6dc148ebe34639854c" Dec 08 22:01:35 crc kubenswrapper[4791]: I1208 22:01:35.743923 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b59302ab6434c8d7d439be9eada3f5c8da9085e7ab5b5b6dc148ebe34639854c"} err="failed to get container status \"b59302ab6434c8d7d439be9eada3f5c8da9085e7ab5b5b6dc148ebe34639854c\": rpc error: code = NotFound desc = could not find container \"b59302ab6434c8d7d439be9eada3f5c8da9085e7ab5b5b6dc148ebe34639854c\": container with ID starting with b59302ab6434c8d7d439be9eada3f5c8da9085e7ab5b5b6dc148ebe34639854c not found: ID does not exist" Dec 08 22:01:35 crc kubenswrapper[4791]: I1208 22:01:35.910032 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gn574"] Dec 08 22:01:35 crc kubenswrapper[4791]: I1208 22:01:35.921157 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-gn574"] Dec 08 22:01:37 crc kubenswrapper[4791]: I1208 22:01:37.618762 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a131a720-c20d-4e35-b2ec-b2bc0540916b" path="/var/lib/kubelet/pods/a131a720-c20d-4e35-b2ec-b2bc0540916b/volumes" Dec 08 22:01:49 crc kubenswrapper[4791]: I1208 22:01:49.599833 4791 scope.go:117] "RemoveContainer" containerID="1eafb2d795bd7d970812895a056ceb1aa3970c27b9c7609ac09be6747381bc3c" Dec 08 22:01:49 crc kubenswrapper[4791]: E1208 22:01:49.600485 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:02:01 crc kubenswrapper[4791]: I1208 22:02:01.597815 4791 scope.go:117] "RemoveContainer" containerID="1eafb2d795bd7d970812895a056ceb1aa3970c27b9c7609ac09be6747381bc3c" Dec 08 22:02:01 crc kubenswrapper[4791]: E1208 22:02:01.598463 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:02:16 crc kubenswrapper[4791]: I1208 22:02:16.599020 4791 scope.go:117] "RemoveContainer" containerID="1eafb2d795bd7d970812895a056ceb1aa3970c27b9c7609ac09be6747381bc3c" Dec 08 22:02:17 crc kubenswrapper[4791]: I1208 22:02:17.004895 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" event={"ID":"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d","Type":"ContainerStarted","Data":"8b357e5859d4c54accba7d3e26bc4b456e8611ccfcc56e88550c314e55fc6dc4"} Dec 08 22:02:38 crc kubenswrapper[4791]: I1208 22:02:38.872183 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-4cqpw"] Dec 08 22:02:38 crc kubenswrapper[4791]: E1208 22:02:38.873188 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a131a720-c20d-4e35-b2ec-b2bc0540916b" containerName="registry-server" Dec 08 22:02:38 crc kubenswrapper[4791]: I1208 22:02:38.873201 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="a131a720-c20d-4e35-b2ec-b2bc0540916b" containerName="registry-server" Dec 08 22:02:38 crc kubenswrapper[4791]: E1208 22:02:38.873222 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a131a720-c20d-4e35-b2ec-b2bc0540916b" containerName="extract-content" Dec 08 22:02:38 crc kubenswrapper[4791]: I1208 22:02:38.873227 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="a131a720-c20d-4e35-b2ec-b2bc0540916b" containerName="extract-content" Dec 08 22:02:38 crc kubenswrapper[4791]: E1208 22:02:38.873235 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a131a720-c20d-4e35-b2ec-b2bc0540916b" containerName="extract-utilities" Dec 08 22:02:38 crc kubenswrapper[4791]: I1208 22:02:38.873241 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="a131a720-c20d-4e35-b2ec-b2bc0540916b" containerName="extract-utilities" Dec 08 22:02:38 crc kubenswrapper[4791]: I1208 22:02:38.873443 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="a131a720-c20d-4e35-b2ec-b2bc0540916b" containerName="registry-server" Dec 08 22:02:38 crc kubenswrapper[4791]: I1208 22:02:38.875414 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4cqpw" Dec 08 22:02:38 crc kubenswrapper[4791]: I1208 22:02:38.888039 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4cqpw"] Dec 08 22:02:38 crc kubenswrapper[4791]: I1208 22:02:38.948208 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f689c8df-841b-402b-b6f3-d5bdc56dca5e-catalog-content\") pod \"redhat-operators-4cqpw\" (UID: \"f689c8df-841b-402b-b6f3-d5bdc56dca5e\") " pod="openshift-marketplace/redhat-operators-4cqpw" Dec 08 22:02:38 crc kubenswrapper[4791]: I1208 22:02:38.948430 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f689c8df-841b-402b-b6f3-d5bdc56dca5e-utilities\") pod \"redhat-operators-4cqpw\" (UID: \"f689c8df-841b-402b-b6f3-d5bdc56dca5e\") " pod="openshift-marketplace/redhat-operators-4cqpw" Dec 08 22:02:38 crc kubenswrapper[4791]: I1208 22:02:38.948470 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zzzmp\" (UniqueName: \"kubernetes.io/projected/f689c8df-841b-402b-b6f3-d5bdc56dca5e-kube-api-access-zzzmp\") pod \"redhat-operators-4cqpw\" (UID: \"f689c8df-841b-402b-b6f3-d5bdc56dca5e\") " pod="openshift-marketplace/redhat-operators-4cqpw" Dec 08 22:02:39 crc kubenswrapper[4791]: I1208 22:02:39.052446 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f689c8df-841b-402b-b6f3-d5bdc56dca5e-catalog-content\") pod \"redhat-operators-4cqpw\" (UID: \"f689c8df-841b-402b-b6f3-d5bdc56dca5e\") " pod="openshift-marketplace/redhat-operators-4cqpw" Dec 08 22:02:39 crc kubenswrapper[4791]: I1208 22:02:39.052919 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f689c8df-841b-402b-b6f3-d5bdc56dca5e-catalog-content\") pod \"redhat-operators-4cqpw\" (UID: \"f689c8df-841b-402b-b6f3-d5bdc56dca5e\") " pod="openshift-marketplace/redhat-operators-4cqpw" Dec 08 22:02:39 crc kubenswrapper[4791]: I1208 22:02:39.052971 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f689c8df-841b-402b-b6f3-d5bdc56dca5e-utilities\") pod \"redhat-operators-4cqpw\" (UID: \"f689c8df-841b-402b-b6f3-d5bdc56dca5e\") " pod="openshift-marketplace/redhat-operators-4cqpw" Dec 08 22:02:39 crc kubenswrapper[4791]: I1208 22:02:39.052998 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zzzmp\" (UniqueName: \"kubernetes.io/projected/f689c8df-841b-402b-b6f3-d5bdc56dca5e-kube-api-access-zzzmp\") pod \"redhat-operators-4cqpw\" (UID: \"f689c8df-841b-402b-b6f3-d5bdc56dca5e\") " pod="openshift-marketplace/redhat-operators-4cqpw" Dec 08 22:02:39 crc kubenswrapper[4791]: I1208 22:02:39.053445 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f689c8df-841b-402b-b6f3-d5bdc56dca5e-utilities\") pod \"redhat-operators-4cqpw\" (UID: \"f689c8df-841b-402b-b6f3-d5bdc56dca5e\") " pod="openshift-marketplace/redhat-operators-4cqpw" Dec 08 22:02:39 crc kubenswrapper[4791]: I1208 22:02:39.077466 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-zzzmp\" (UniqueName: \"kubernetes.io/projected/f689c8df-841b-402b-b6f3-d5bdc56dca5e-kube-api-access-zzzmp\") pod \"redhat-operators-4cqpw\" (UID: \"f689c8df-841b-402b-b6f3-d5bdc56dca5e\") " pod="openshift-marketplace/redhat-operators-4cqpw" Dec 08 22:02:39 crc kubenswrapper[4791]: I1208 22:02:39.198207 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4cqpw" Dec 08 22:02:39 crc kubenswrapper[4791]: I1208 22:02:39.709600 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4cqpw"] Dec 08 22:02:40 crc kubenswrapper[4791]: I1208 22:02:40.249576 4791 generic.go:334] "Generic (PLEG): container finished" podID="f689c8df-841b-402b-b6f3-d5bdc56dca5e" containerID="9a65f3cf5d377b47a990d8c6b67fbb6c46571ec2873785e73440f88b209418ee" exitCode=0 Dec 08 22:02:40 crc kubenswrapper[4791]: I1208 22:02:40.249629 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4cqpw" event={"ID":"f689c8df-841b-402b-b6f3-d5bdc56dca5e","Type":"ContainerDied","Data":"9a65f3cf5d377b47a990d8c6b67fbb6c46571ec2873785e73440f88b209418ee"} Dec 08 22:02:40 crc kubenswrapper[4791]: I1208 22:02:40.249887 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4cqpw" event={"ID":"f689c8df-841b-402b-b6f3-d5bdc56dca5e","Type":"ContainerStarted","Data":"44aa7283173a9823cd76de8e1ec5c10b1ffc8c663aaf2f3dd664015f8db6f37c"} Dec 08 22:02:41 crc kubenswrapper[4791]: I1208 22:02:41.263857 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4cqpw" event={"ID":"f689c8df-841b-402b-b6f3-d5bdc56dca5e","Type":"ContainerStarted","Data":"ba79b8c52af5506967fe7e21082ace45009fdb1f1ef498eb67e5941268200046"} Dec 08 22:02:44 crc kubenswrapper[4791]: I1208 22:02:44.307181 4791 generic.go:334] "Generic (PLEG): container finished" podID="f689c8df-841b-402b-b6f3-d5bdc56dca5e" containerID="ba79b8c52af5506967fe7e21082ace45009fdb1f1ef498eb67e5941268200046" exitCode=0 Dec 08 22:02:44 crc kubenswrapper[4791]: I1208 22:02:44.307292 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4cqpw" event={"ID":"f689c8df-841b-402b-b6f3-d5bdc56dca5e","Type":"ContainerDied","Data":"ba79b8c52af5506967fe7e21082ace45009fdb1f1ef498eb67e5941268200046"} Dec 08 22:02:45 crc kubenswrapper[4791]: I1208 22:02:45.329847 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4cqpw" event={"ID":"f689c8df-841b-402b-b6f3-d5bdc56dca5e","Type":"ContainerStarted","Data":"1508071cdf5467c45ba86dca36f85e0a1cedaf7798141daef6720d56d161c3aa"} Dec 08 22:02:45 crc kubenswrapper[4791]: I1208 22:02:45.362468 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-4cqpw" podStartSLOduration=2.877454672 podStartE2EDuration="7.362445591s" podCreationTimestamp="2025-12-08 22:02:38 +0000 UTC" firstStartedPulling="2025-12-08 22:02:40.251301758 +0000 UTC m=+2636.950060093" lastFinishedPulling="2025-12-08 22:02:44.736292667 +0000 UTC m=+2641.435051012" observedRunningTime="2025-12-08 22:02:45.347899126 +0000 UTC m=+2642.046657481" watchObservedRunningTime="2025-12-08 22:02:45.362445591 +0000 UTC m=+2642.061203956" Dec 08 22:02:49 crc kubenswrapper[4791]: I1208 22:02:49.198956 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-4cqpw" 
Dec 08 22:02:49 crc kubenswrapper[4791]: I1208 22:02:49.200604 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-4cqpw" Dec 08 22:02:50 crc kubenswrapper[4791]: I1208 22:02:50.258057 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-4cqpw" podUID="f689c8df-841b-402b-b6f3-d5bdc56dca5e" containerName="registry-server" probeResult="failure" output=< Dec 08 22:02:50 crc kubenswrapper[4791]: timeout: failed to connect service ":50051" within 1s Dec 08 22:02:50 crc kubenswrapper[4791]: > Dec 08 22:02:59 crc kubenswrapper[4791]: I1208 22:02:59.250578 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-4cqpw" Dec 08 22:02:59 crc kubenswrapper[4791]: I1208 22:02:59.303767 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-4cqpw" Dec 08 22:02:59 crc kubenswrapper[4791]: I1208 22:02:59.493773 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4cqpw"] Dec 08 22:03:00 crc kubenswrapper[4791]: I1208 22:03:00.477528 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-4cqpw" podUID="f689c8df-841b-402b-b6f3-d5bdc56dca5e" containerName="registry-server" containerID="cri-o://1508071cdf5467c45ba86dca36f85e0a1cedaf7798141daef6720d56d161c3aa" gracePeriod=2 Dec 08 22:03:01 crc kubenswrapper[4791]: I1208 22:03:01.503340 4791 generic.go:334] "Generic (PLEG): container finished" podID="f689c8df-841b-402b-b6f3-d5bdc56dca5e" containerID="1508071cdf5467c45ba86dca36f85e0a1cedaf7798141daef6720d56d161c3aa" exitCode=0 Dec 08 22:03:01 crc kubenswrapper[4791]: I1208 22:03:01.503411 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4cqpw" event={"ID":"f689c8df-841b-402b-b6f3-d5bdc56dca5e","Type":"ContainerDied","Data":"1508071cdf5467c45ba86dca36f85e0a1cedaf7798141daef6720d56d161c3aa"} Dec 08 22:03:01 crc kubenswrapper[4791]: I1208 22:03:01.975514 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4cqpw" Dec 08 22:03:02 crc kubenswrapper[4791]: I1208 22:03:02.003441 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zzzmp\" (UniqueName: \"kubernetes.io/projected/f689c8df-841b-402b-b6f3-d5bdc56dca5e-kube-api-access-zzzmp\") pod \"f689c8df-841b-402b-b6f3-d5bdc56dca5e\" (UID: \"f689c8df-841b-402b-b6f3-d5bdc56dca5e\") " Dec 08 22:03:02 crc kubenswrapper[4791]: I1208 22:03:02.003729 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f689c8df-841b-402b-b6f3-d5bdc56dca5e-catalog-content\") pod \"f689c8df-841b-402b-b6f3-d5bdc56dca5e\" (UID: \"f689c8df-841b-402b-b6f3-d5bdc56dca5e\") " Dec 08 22:03:02 crc kubenswrapper[4791]: I1208 22:03:02.003790 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f689c8df-841b-402b-b6f3-d5bdc56dca5e-utilities\") pod \"f689c8df-841b-402b-b6f3-d5bdc56dca5e\" (UID: \"f689c8df-841b-402b-b6f3-d5bdc56dca5e\") " Dec 08 22:03:02 crc kubenswrapper[4791]: I1208 22:03:02.005026 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f689c8df-841b-402b-b6f3-d5bdc56dca5e-utilities" (OuterVolumeSpecName: "utilities") pod "f689c8df-841b-402b-b6f3-d5bdc56dca5e" (UID: "f689c8df-841b-402b-b6f3-d5bdc56dca5e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:03:02 crc kubenswrapper[4791]: I1208 22:03:02.061486 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f689c8df-841b-402b-b6f3-d5bdc56dca5e-kube-api-access-zzzmp" (OuterVolumeSpecName: "kube-api-access-zzzmp") pod "f689c8df-841b-402b-b6f3-d5bdc56dca5e" (UID: "f689c8df-841b-402b-b6f3-d5bdc56dca5e"). InnerVolumeSpecName "kube-api-access-zzzmp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 22:03:02 crc kubenswrapper[4791]: I1208 22:03:02.106644 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zzzmp\" (UniqueName: \"kubernetes.io/projected/f689c8df-841b-402b-b6f3-d5bdc56dca5e-kube-api-access-zzzmp\") on node \"crc\" DevicePath \"\"" Dec 08 22:03:02 crc kubenswrapper[4791]: I1208 22:03:02.106683 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f689c8df-841b-402b-b6f3-d5bdc56dca5e-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 22:03:02 crc kubenswrapper[4791]: I1208 22:03:02.133414 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f689c8df-841b-402b-b6f3-d5bdc56dca5e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f689c8df-841b-402b-b6f3-d5bdc56dca5e" (UID: "f689c8df-841b-402b-b6f3-d5bdc56dca5e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:03:02 crc kubenswrapper[4791]: I1208 22:03:02.209360 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f689c8df-841b-402b-b6f3-d5bdc56dca5e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 22:03:02 crc kubenswrapper[4791]: I1208 22:03:02.527958 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4cqpw" event={"ID":"f689c8df-841b-402b-b6f3-d5bdc56dca5e","Type":"ContainerDied","Data":"44aa7283173a9823cd76de8e1ec5c10b1ffc8c663aaf2f3dd664015f8db6f37c"} Dec 08 22:03:02 crc kubenswrapper[4791]: I1208 22:03:02.528021 4791 scope.go:117] "RemoveContainer" containerID="1508071cdf5467c45ba86dca36f85e0a1cedaf7798141daef6720d56d161c3aa" Dec 08 22:03:02 crc kubenswrapper[4791]: I1208 22:03:02.528194 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4cqpw" Dec 08 22:03:02 crc kubenswrapper[4791]: I1208 22:03:02.566580 4791 scope.go:117] "RemoveContainer" containerID="ba79b8c52af5506967fe7e21082ace45009fdb1f1ef498eb67e5941268200046" Dec 08 22:03:02 crc kubenswrapper[4791]: I1208 22:03:02.582917 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4cqpw"] Dec 08 22:03:02 crc kubenswrapper[4791]: I1208 22:03:02.590663 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-4cqpw"] Dec 08 22:03:02 crc kubenswrapper[4791]: I1208 22:03:02.592524 4791 scope.go:117] "RemoveContainer" containerID="9a65f3cf5d377b47a990d8c6b67fbb6c46571ec2873785e73440f88b209418ee" Dec 08 22:03:03 crc kubenswrapper[4791]: I1208 22:03:03.618486 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f689c8df-841b-402b-b6f3-d5bdc56dca5e" path="/var/lib/kubelet/pods/f689c8df-841b-402b-b6f3-d5bdc56dca5e/volumes" Dec 08 22:03:07 crc kubenswrapper[4791]: I1208 22:03:07.582048 4791 generic.go:334] "Generic (PLEG): container finished" podID="bcd8d669-4a40-401d-af99-651b840fb48b" containerID="44fa0a94e5bd598fdbbaee16e5e28873de9d87b1abdb7377c908f08871b158af" exitCode=1 Dec 08 22:03:07 crc kubenswrapper[4791]: I1208 22:03:07.582115 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" event={"ID":"bcd8d669-4a40-401d-af99-651b840fb48b","Type":"ContainerDied","Data":"44fa0a94e5bd598fdbbaee16e5e28873de9d87b1abdb7377c908f08871b158af"} Dec 08 22:03:07 crc kubenswrapper[4791]: I1208 22:03:07.582546 4791 scope.go:117] "RemoveContainer" containerID="ccc7edee626269a4ac3ac7f405ce41af1eb9e47254400a3bf52e57f2ec17175f" Dec 08 22:03:07 crc kubenswrapper[4791]: I1208 22:03:07.583885 4791 scope.go:117] "RemoveContainer" containerID="44fa0a94e5bd598fdbbaee16e5e28873de9d87b1abdb7377c908f08871b158af" Dec 08 22:03:07 crc kubenswrapper[4791]: E1208 22:03:07.584387 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:03:15 crc kubenswrapper[4791]: I1208 22:03:15.884089 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 22:03:15 crc kubenswrapper[4791]: I1208 22:03:15.885403 4791 scope.go:117] "RemoveContainer" containerID="44fa0a94e5bd598fdbbaee16e5e28873de9d87b1abdb7377c908f08871b158af" Dec 08 22:03:15 crc kubenswrapper[4791]: E1208 22:03:15.885754 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:03:25 crc kubenswrapper[4791]: I1208 22:03:25.884304 4791 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 22:03:25 crc kubenswrapper[4791]: I1208 22:03:25.887285 4791 scope.go:117] "RemoveContainer" containerID="44fa0a94e5bd598fdbbaee16e5e28873de9d87b1abdb7377c908f08871b158af" Dec 08 22:03:25 crc kubenswrapper[4791]: E1208 22:03:25.887729 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:03:40 crc kubenswrapper[4791]: I1208 22:03:40.598598 4791 scope.go:117] "RemoveContainer" containerID="44fa0a94e5bd598fdbbaee16e5e28873de9d87b1abdb7377c908f08871b158af" Dec 08 22:03:40 crc kubenswrapper[4791]: E1208 22:03:40.599491 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:03:51 crc kubenswrapper[4791]: I1208 22:03:51.611949 4791 scope.go:117] "RemoveContainer" containerID="44fa0a94e5bd598fdbbaee16e5e28873de9d87b1abdb7377c908f08871b158af" Dec 08 22:03:51 crc kubenswrapper[4791]: E1208 22:03:51.612913 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:04:06 crc kubenswrapper[4791]: I1208 22:04:06.597939 4791 scope.go:117] "RemoveContainer" containerID="44fa0a94e5bd598fdbbaee16e5e28873de9d87b1abdb7377c908f08871b158af" Dec 08 22:04:06 crc kubenswrapper[4791]: E1208 22:04:06.598599 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" 
pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:04:18 crc kubenswrapper[4791]: I1208 22:04:18.598399 4791 scope.go:117] "RemoveContainer" containerID="44fa0a94e5bd598fdbbaee16e5e28873de9d87b1abdb7377c908f08871b158af" Dec 08 22:04:18 crc kubenswrapper[4791]: E1208 22:04:18.599169 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:04:27 crc kubenswrapper[4791]: I1208 22:04:27.226557 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-hqpj8"] Dec 08 22:04:27 crc kubenswrapper[4791]: E1208 22:04:27.227526 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f689c8df-841b-402b-b6f3-d5bdc56dca5e" containerName="registry-server" Dec 08 22:04:27 crc kubenswrapper[4791]: I1208 22:04:27.227542 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="f689c8df-841b-402b-b6f3-d5bdc56dca5e" containerName="registry-server" Dec 08 22:04:27 crc kubenswrapper[4791]: E1208 22:04:27.227557 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f689c8df-841b-402b-b6f3-d5bdc56dca5e" containerName="extract-content" Dec 08 22:04:27 crc kubenswrapper[4791]: I1208 22:04:27.227563 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="f689c8df-841b-402b-b6f3-d5bdc56dca5e" containerName="extract-content" Dec 08 22:04:27 crc kubenswrapper[4791]: E1208 22:04:27.227588 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f689c8df-841b-402b-b6f3-d5bdc56dca5e" containerName="extract-utilities" Dec 08 22:04:27 crc kubenswrapper[4791]: I1208 22:04:27.227595 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="f689c8df-841b-402b-b6f3-d5bdc56dca5e" containerName="extract-utilities" Dec 08 22:04:27 crc kubenswrapper[4791]: I1208 22:04:27.228043 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="f689c8df-841b-402b-b6f3-d5bdc56dca5e" containerName="registry-server" Dec 08 22:04:27 crc kubenswrapper[4791]: I1208 22:04:27.229966 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hqpj8" Dec 08 22:04:27 crc kubenswrapper[4791]: I1208 22:04:27.246462 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hqpj8"] Dec 08 22:04:27 crc kubenswrapper[4791]: I1208 22:04:27.316912 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0d33661-88c6-4b29-8c74-89cd3ea52570-utilities\") pod \"community-operators-hqpj8\" (UID: \"f0d33661-88c6-4b29-8c74-89cd3ea52570\") " pod="openshift-marketplace/community-operators-hqpj8" Dec 08 22:04:27 crc kubenswrapper[4791]: I1208 22:04:27.317344 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z7ct9\" (UniqueName: \"kubernetes.io/projected/f0d33661-88c6-4b29-8c74-89cd3ea52570-kube-api-access-z7ct9\") pod \"community-operators-hqpj8\" (UID: \"f0d33661-88c6-4b29-8c74-89cd3ea52570\") " pod="openshift-marketplace/community-operators-hqpj8" Dec 08 22:04:27 crc kubenswrapper[4791]: I1208 22:04:27.317480 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0d33661-88c6-4b29-8c74-89cd3ea52570-catalog-content\") pod \"community-operators-hqpj8\" (UID: \"f0d33661-88c6-4b29-8c74-89cd3ea52570\") " pod="openshift-marketplace/community-operators-hqpj8" Dec 08 22:04:27 crc kubenswrapper[4791]: I1208 22:04:27.420190 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0d33661-88c6-4b29-8c74-89cd3ea52570-utilities\") pod \"community-operators-hqpj8\" (UID: \"f0d33661-88c6-4b29-8c74-89cd3ea52570\") " pod="openshift-marketplace/community-operators-hqpj8" Dec 08 22:04:27 crc kubenswrapper[4791]: I1208 22:04:27.420326 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z7ct9\" (UniqueName: \"kubernetes.io/projected/f0d33661-88c6-4b29-8c74-89cd3ea52570-kube-api-access-z7ct9\") pod \"community-operators-hqpj8\" (UID: \"f0d33661-88c6-4b29-8c74-89cd3ea52570\") " pod="openshift-marketplace/community-operators-hqpj8" Dec 08 22:04:27 crc kubenswrapper[4791]: I1208 22:04:27.420354 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0d33661-88c6-4b29-8c74-89cd3ea52570-catalog-content\") pod \"community-operators-hqpj8\" (UID: \"f0d33661-88c6-4b29-8c74-89cd3ea52570\") " pod="openshift-marketplace/community-operators-hqpj8" Dec 08 22:04:27 crc kubenswrapper[4791]: I1208 22:04:27.420934 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0d33661-88c6-4b29-8c74-89cd3ea52570-catalog-content\") pod \"community-operators-hqpj8\" (UID: \"f0d33661-88c6-4b29-8c74-89cd3ea52570\") " pod="openshift-marketplace/community-operators-hqpj8" Dec 08 22:04:27 crc kubenswrapper[4791]: I1208 22:04:27.421054 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0d33661-88c6-4b29-8c74-89cd3ea52570-utilities\") pod \"community-operators-hqpj8\" (UID: \"f0d33661-88c6-4b29-8c74-89cd3ea52570\") " pod="openshift-marketplace/community-operators-hqpj8" Dec 08 22:04:27 crc kubenswrapper[4791]: I1208 22:04:27.443165 4791 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-z7ct9\" (UniqueName: \"kubernetes.io/projected/f0d33661-88c6-4b29-8c74-89cd3ea52570-kube-api-access-z7ct9\") pod \"community-operators-hqpj8\" (UID: \"f0d33661-88c6-4b29-8c74-89cd3ea52570\") " pod="openshift-marketplace/community-operators-hqpj8" Dec 08 22:04:27 crc kubenswrapper[4791]: I1208 22:04:27.551256 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hqpj8" Dec 08 22:04:28 crc kubenswrapper[4791]: I1208 22:04:28.224036 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hqpj8"] Dec 08 22:04:28 crc kubenswrapper[4791]: I1208 22:04:28.394196 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hqpj8" event={"ID":"f0d33661-88c6-4b29-8c74-89cd3ea52570","Type":"ContainerStarted","Data":"46bedcd2b9aa845b1a9c408c1fb3c448afcd55d505961a1c793ad051aca99cad"} Dec 08 22:04:29 crc kubenswrapper[4791]: I1208 22:04:29.405920 4791 generic.go:334] "Generic (PLEG): container finished" podID="f0d33661-88c6-4b29-8c74-89cd3ea52570" containerID="0b353e4f5d679cfca487a6aca8721f7be7b77661b782a94049444adfcb2c1cd6" exitCode=0 Dec 08 22:04:29 crc kubenswrapper[4791]: I1208 22:04:29.406026 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hqpj8" event={"ID":"f0d33661-88c6-4b29-8c74-89cd3ea52570","Type":"ContainerDied","Data":"0b353e4f5d679cfca487a6aca8721f7be7b77661b782a94049444adfcb2c1cd6"} Dec 08 22:04:31 crc kubenswrapper[4791]: I1208 22:04:31.436548 4791 generic.go:334] "Generic (PLEG): container finished" podID="f0d33661-88c6-4b29-8c74-89cd3ea52570" containerID="22b023f7583b087cb7df8e4cb69ea6334c8a2400c094e343dcb5fd5186df1b35" exitCode=0 Dec 08 22:04:31 crc kubenswrapper[4791]: I1208 22:04:31.436620 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hqpj8" event={"ID":"f0d33661-88c6-4b29-8c74-89cd3ea52570","Type":"ContainerDied","Data":"22b023f7583b087cb7df8e4cb69ea6334c8a2400c094e343dcb5fd5186df1b35"} Dec 08 22:04:33 crc kubenswrapper[4791]: I1208 22:04:33.479185 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hqpj8" event={"ID":"f0d33661-88c6-4b29-8c74-89cd3ea52570","Type":"ContainerStarted","Data":"33ef028ff0f8a44aa4c68480878cc35b791739dee6ec44d2dc8b45fb048dbf8d"} Dec 08 22:04:33 crc kubenswrapper[4791]: I1208 22:04:33.507050 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-hqpj8" podStartSLOduration=3.76764683 podStartE2EDuration="6.507026713s" podCreationTimestamp="2025-12-08 22:04:27 +0000 UTC" firstStartedPulling="2025-12-08 22:04:29.409492013 +0000 UTC m=+2746.108250358" lastFinishedPulling="2025-12-08 22:04:32.148871896 +0000 UTC m=+2748.847630241" observedRunningTime="2025-12-08 22:04:33.497363761 +0000 UTC m=+2750.196122106" watchObservedRunningTime="2025-12-08 22:04:33.507026713 +0000 UTC m=+2750.205785058" Dec 08 22:04:33 crc kubenswrapper[4791]: I1208 22:04:33.605965 4791 scope.go:117] "RemoveContainer" containerID="44fa0a94e5bd598fdbbaee16e5e28873de9d87b1abdb7377c908f08871b158af" Dec 08 22:04:33 crc kubenswrapper[4791]: E1208 22:04:33.606280 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager 
pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:04:35 crc kubenswrapper[4791]: I1208 22:04:35.251158 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 22:04:35 crc kubenswrapper[4791]: I1208 22:04:35.251526 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 22:04:37 crc kubenswrapper[4791]: I1208 22:04:37.622933 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-hqpj8" Dec 08 22:04:37 crc kubenswrapper[4791]: I1208 22:04:37.623253 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-hqpj8" Dec 08 22:04:37 crc kubenswrapper[4791]: I1208 22:04:37.669154 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-hqpj8" Dec 08 22:04:38 crc kubenswrapper[4791]: I1208 22:04:38.681423 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-hqpj8" Dec 08 22:04:38 crc kubenswrapper[4791]: I1208 22:04:38.740138 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hqpj8"] Dec 08 22:04:40 crc kubenswrapper[4791]: I1208 22:04:40.650554 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-hqpj8" podUID="f0d33661-88c6-4b29-8c74-89cd3ea52570" containerName="registry-server" containerID="cri-o://33ef028ff0f8a44aa4c68480878cc35b791739dee6ec44d2dc8b45fb048dbf8d" gracePeriod=2 Dec 08 22:04:41 crc kubenswrapper[4791]: I1208 22:04:41.220139 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hqpj8" Dec 08 22:04:41 crc kubenswrapper[4791]: I1208 22:04:41.332685 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0d33661-88c6-4b29-8c74-89cd3ea52570-catalog-content\") pod \"f0d33661-88c6-4b29-8c74-89cd3ea52570\" (UID: \"f0d33661-88c6-4b29-8c74-89cd3ea52570\") " Dec 08 22:04:41 crc kubenswrapper[4791]: I1208 22:04:41.332811 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z7ct9\" (UniqueName: \"kubernetes.io/projected/f0d33661-88c6-4b29-8c74-89cd3ea52570-kube-api-access-z7ct9\") pod \"f0d33661-88c6-4b29-8c74-89cd3ea52570\" (UID: \"f0d33661-88c6-4b29-8c74-89cd3ea52570\") " Dec 08 22:04:41 crc kubenswrapper[4791]: I1208 22:04:41.332983 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0d33661-88c6-4b29-8c74-89cd3ea52570-utilities\") pod \"f0d33661-88c6-4b29-8c74-89cd3ea52570\" (UID: \"f0d33661-88c6-4b29-8c74-89cd3ea52570\") " Dec 08 22:04:41 crc kubenswrapper[4791]: I1208 22:04:41.334168 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f0d33661-88c6-4b29-8c74-89cd3ea52570-utilities" (OuterVolumeSpecName: "utilities") pod "f0d33661-88c6-4b29-8c74-89cd3ea52570" (UID: "f0d33661-88c6-4b29-8c74-89cd3ea52570"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:04:41 crc kubenswrapper[4791]: I1208 22:04:41.341010 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0d33661-88c6-4b29-8c74-89cd3ea52570-kube-api-access-z7ct9" (OuterVolumeSpecName: "kube-api-access-z7ct9") pod "f0d33661-88c6-4b29-8c74-89cd3ea52570" (UID: "f0d33661-88c6-4b29-8c74-89cd3ea52570"). InnerVolumeSpecName "kube-api-access-z7ct9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 22:04:41 crc kubenswrapper[4791]: I1208 22:04:41.383418 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f0d33661-88c6-4b29-8c74-89cd3ea52570-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f0d33661-88c6-4b29-8c74-89cd3ea52570" (UID: "f0d33661-88c6-4b29-8c74-89cd3ea52570"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:04:41 crc kubenswrapper[4791]: I1208 22:04:41.436971 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0d33661-88c6-4b29-8c74-89cd3ea52570-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 22:04:41 crc kubenswrapper[4791]: I1208 22:04:41.437015 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z7ct9\" (UniqueName: \"kubernetes.io/projected/f0d33661-88c6-4b29-8c74-89cd3ea52570-kube-api-access-z7ct9\") on node \"crc\" DevicePath \"\"" Dec 08 22:04:41 crc kubenswrapper[4791]: I1208 22:04:41.437031 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0d33661-88c6-4b29-8c74-89cd3ea52570-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 22:04:41 crc kubenswrapper[4791]: I1208 22:04:41.661915 4791 generic.go:334] "Generic (PLEG): container finished" podID="f0d33661-88c6-4b29-8c74-89cd3ea52570" containerID="33ef028ff0f8a44aa4c68480878cc35b791739dee6ec44d2dc8b45fb048dbf8d" exitCode=0 Dec 08 22:04:41 crc kubenswrapper[4791]: I1208 22:04:41.661965 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hqpj8" event={"ID":"f0d33661-88c6-4b29-8c74-89cd3ea52570","Type":"ContainerDied","Data":"33ef028ff0f8a44aa4c68480878cc35b791739dee6ec44d2dc8b45fb048dbf8d"} Dec 08 22:04:41 crc kubenswrapper[4791]: I1208 22:04:41.662001 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hqpj8" event={"ID":"f0d33661-88c6-4b29-8c74-89cd3ea52570","Type":"ContainerDied","Data":"46bedcd2b9aa845b1a9c408c1fb3c448afcd55d505961a1c793ad051aca99cad"} Dec 08 22:04:41 crc kubenswrapper[4791]: I1208 22:04:41.662021 4791 scope.go:117] "RemoveContainer" containerID="33ef028ff0f8a44aa4c68480878cc35b791739dee6ec44d2dc8b45fb048dbf8d" Dec 08 22:04:41 crc kubenswrapper[4791]: I1208 22:04:41.662217 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hqpj8" Dec 08 22:04:41 crc kubenswrapper[4791]: I1208 22:04:41.692020 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hqpj8"] Dec 08 22:04:41 crc kubenswrapper[4791]: I1208 22:04:41.694521 4791 scope.go:117] "RemoveContainer" containerID="22b023f7583b087cb7df8e4cb69ea6334c8a2400c094e343dcb5fd5186df1b35" Dec 08 22:04:41 crc kubenswrapper[4791]: I1208 22:04:41.702838 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-hqpj8"] Dec 08 22:04:41 crc kubenswrapper[4791]: I1208 22:04:41.720285 4791 scope.go:117] "RemoveContainer" containerID="0b353e4f5d679cfca487a6aca8721f7be7b77661b782a94049444adfcb2c1cd6" Dec 08 22:04:41 crc kubenswrapper[4791]: I1208 22:04:41.774309 4791 scope.go:117] "RemoveContainer" containerID="33ef028ff0f8a44aa4c68480878cc35b791739dee6ec44d2dc8b45fb048dbf8d" Dec 08 22:04:41 crc kubenswrapper[4791]: E1208 22:04:41.775816 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"33ef028ff0f8a44aa4c68480878cc35b791739dee6ec44d2dc8b45fb048dbf8d\": container with ID starting with 33ef028ff0f8a44aa4c68480878cc35b791739dee6ec44d2dc8b45fb048dbf8d not found: ID does not exist" containerID="33ef028ff0f8a44aa4c68480878cc35b791739dee6ec44d2dc8b45fb048dbf8d" Dec 08 22:04:41 crc kubenswrapper[4791]: I1208 22:04:41.775882 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33ef028ff0f8a44aa4c68480878cc35b791739dee6ec44d2dc8b45fb048dbf8d"} err="failed to get container status \"33ef028ff0f8a44aa4c68480878cc35b791739dee6ec44d2dc8b45fb048dbf8d\": rpc error: code = NotFound desc = could not find container \"33ef028ff0f8a44aa4c68480878cc35b791739dee6ec44d2dc8b45fb048dbf8d\": container with ID starting with 33ef028ff0f8a44aa4c68480878cc35b791739dee6ec44d2dc8b45fb048dbf8d not found: ID does not exist" Dec 08 22:04:41 crc kubenswrapper[4791]: I1208 22:04:41.775915 4791 scope.go:117] "RemoveContainer" containerID="22b023f7583b087cb7df8e4cb69ea6334c8a2400c094e343dcb5fd5186df1b35" Dec 08 22:04:41 crc kubenswrapper[4791]: E1208 22:04:41.776286 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22b023f7583b087cb7df8e4cb69ea6334c8a2400c094e343dcb5fd5186df1b35\": container with ID starting with 22b023f7583b087cb7df8e4cb69ea6334c8a2400c094e343dcb5fd5186df1b35 not found: ID does not exist" containerID="22b023f7583b087cb7df8e4cb69ea6334c8a2400c094e343dcb5fd5186df1b35" Dec 08 22:04:41 crc kubenswrapper[4791]: I1208 22:04:41.776306 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22b023f7583b087cb7df8e4cb69ea6334c8a2400c094e343dcb5fd5186df1b35"} err="failed to get container status \"22b023f7583b087cb7df8e4cb69ea6334c8a2400c094e343dcb5fd5186df1b35\": rpc error: code = NotFound desc = could not find container \"22b023f7583b087cb7df8e4cb69ea6334c8a2400c094e343dcb5fd5186df1b35\": container with ID starting with 22b023f7583b087cb7df8e4cb69ea6334c8a2400c094e343dcb5fd5186df1b35 not found: ID does not exist" Dec 08 22:04:41 crc kubenswrapper[4791]: I1208 22:04:41.776324 4791 scope.go:117] "RemoveContainer" containerID="0b353e4f5d679cfca487a6aca8721f7be7b77661b782a94049444adfcb2c1cd6" Dec 08 22:04:41 crc kubenswrapper[4791]: E1208 22:04:41.776551 4791 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"0b353e4f5d679cfca487a6aca8721f7be7b77661b782a94049444adfcb2c1cd6\": container with ID starting with 0b353e4f5d679cfca487a6aca8721f7be7b77661b782a94049444adfcb2c1cd6 not found: ID does not exist" containerID="0b353e4f5d679cfca487a6aca8721f7be7b77661b782a94049444adfcb2c1cd6" Dec 08 22:04:41 crc kubenswrapper[4791]: I1208 22:04:41.776573 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b353e4f5d679cfca487a6aca8721f7be7b77661b782a94049444adfcb2c1cd6"} err="failed to get container status \"0b353e4f5d679cfca487a6aca8721f7be7b77661b782a94049444adfcb2c1cd6\": rpc error: code = NotFound desc = could not find container \"0b353e4f5d679cfca487a6aca8721f7be7b77661b782a94049444adfcb2c1cd6\": container with ID starting with 0b353e4f5d679cfca487a6aca8721f7be7b77661b782a94049444adfcb2c1cd6 not found: ID does not exist" Dec 08 22:04:43 crc kubenswrapper[4791]: I1208 22:04:43.610893 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f0d33661-88c6-4b29-8c74-89cd3ea52570" path="/var/lib/kubelet/pods/f0d33661-88c6-4b29-8c74-89cd3ea52570/volumes" Dec 08 22:04:47 crc kubenswrapper[4791]: I1208 22:04:47.599177 4791 scope.go:117] "RemoveContainer" containerID="44fa0a94e5bd598fdbbaee16e5e28873de9d87b1abdb7377c908f08871b158af" Dec 08 22:04:47 crc kubenswrapper[4791]: E1208 22:04:47.600068 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:04:59 crc kubenswrapper[4791]: I1208 22:04:59.598598 4791 scope.go:117] "RemoveContainer" containerID="44fa0a94e5bd598fdbbaee16e5e28873de9d87b1abdb7377c908f08871b158af" Dec 08 22:04:59 crc kubenswrapper[4791]: E1208 22:04:59.599330 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:05:05 crc kubenswrapper[4791]: I1208 22:05:05.251051 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 22:05:05 crc kubenswrapper[4791]: I1208 22:05:05.251359 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 22:05:12 crc kubenswrapper[4791]: I1208 22:05:12.597981 4791 scope.go:117] "RemoveContainer" containerID="44fa0a94e5bd598fdbbaee16e5e28873de9d87b1abdb7377c908f08871b158af" Dec 08 22:05:12 crc kubenswrapper[4791]: E1208 22:05:12.598744 4791 pod_workers.go:1301] "Error 
syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:05:26 crc kubenswrapper[4791]: I1208 22:05:26.598514 4791 scope.go:117] "RemoveContainer" containerID="44fa0a94e5bd598fdbbaee16e5e28873de9d87b1abdb7377c908f08871b158af" Dec 08 22:05:26 crc kubenswrapper[4791]: E1208 22:05:26.599357 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:05:35 crc kubenswrapper[4791]: I1208 22:05:35.251049 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 22:05:35 crc kubenswrapper[4791]: I1208 22:05:35.251599 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 22:05:35 crc kubenswrapper[4791]: I1208 22:05:35.251673 4791 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" Dec 08 22:05:35 crc kubenswrapper[4791]: I1208 22:05:35.252698 4791 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8b357e5859d4c54accba7d3e26bc4b456e8611ccfcc56e88550c314e55fc6dc4"} pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 08 22:05:35 crc kubenswrapper[4791]: I1208 22:05:35.252765 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" containerID="cri-o://8b357e5859d4c54accba7d3e26bc4b456e8611ccfcc56e88550c314e55fc6dc4" gracePeriod=600 Dec 08 22:05:35 crc kubenswrapper[4791]: E1208 22:05:35.324293 4791 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6cdfecf8_95cf_4c2b_b98f_eb7bb055771d.slice/crio-conmon-8b357e5859d4c54accba7d3e26bc4b456e8611ccfcc56e88550c314e55fc6dc4.scope\": RecentStats: unable to find data in memory cache]" Dec 08 22:05:35 crc kubenswrapper[4791]: I1208 22:05:35.415411 4791 generic.go:334] "Generic (PLEG): container finished" podID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerID="8b357e5859d4c54accba7d3e26bc4b456e8611ccfcc56e88550c314e55fc6dc4" exitCode=0 Dec 08 22:05:35 crc 
kubenswrapper[4791]: I1208 22:05:35.415461 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" event={"ID":"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d","Type":"ContainerDied","Data":"8b357e5859d4c54accba7d3e26bc4b456e8611ccfcc56e88550c314e55fc6dc4"} Dec 08 22:05:35 crc kubenswrapper[4791]: I1208 22:05:35.415493 4791 scope.go:117] "RemoveContainer" containerID="1eafb2d795bd7d970812895a056ceb1aa3970c27b9c7609ac09be6747381bc3c" Dec 08 22:05:36 crc kubenswrapper[4791]: I1208 22:05:36.427430 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" event={"ID":"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d","Type":"ContainerStarted","Data":"d71e943fae01350220f668be28850d267e8c50777be2af922215add5cdf7fee4"} Dec 08 22:05:37 crc kubenswrapper[4791]: I1208 22:05:37.598131 4791 scope.go:117] "RemoveContainer" containerID="44fa0a94e5bd598fdbbaee16e5e28873de9d87b1abdb7377c908f08871b158af" Dec 08 22:05:37 crc kubenswrapper[4791]: E1208 22:05:37.598456 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:05:52 crc kubenswrapper[4791]: I1208 22:05:52.597834 4791 scope.go:117] "RemoveContainer" containerID="44fa0a94e5bd598fdbbaee16e5e28873de9d87b1abdb7377c908f08871b158af" Dec 08 22:05:52 crc kubenswrapper[4791]: E1208 22:05:52.598646 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:06:06 crc kubenswrapper[4791]: I1208 22:06:06.598009 4791 scope.go:117] "RemoveContainer" containerID="44fa0a94e5bd598fdbbaee16e5e28873de9d87b1abdb7377c908f08871b158af" Dec 08 22:06:06 crc kubenswrapper[4791]: E1208 22:06:06.598632 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:06:17 crc kubenswrapper[4791]: I1208 22:06:17.598810 4791 scope.go:117] "RemoveContainer" containerID="44fa0a94e5bd598fdbbaee16e5e28873de9d87b1abdb7377c908f08871b158af" Dec 08 22:06:17 crc kubenswrapper[4791]: E1208 22:06:17.600470 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:06:31 crc kubenswrapper[4791]: 
I1208 22:06:31.598744 4791 scope.go:117] "RemoveContainer" containerID="44fa0a94e5bd598fdbbaee16e5e28873de9d87b1abdb7377c908f08871b158af" Dec 08 22:06:31 crc kubenswrapper[4791]: E1208 22:06:31.599432 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:06:43 crc kubenswrapper[4791]: I1208 22:06:43.607876 4791 scope.go:117] "RemoveContainer" containerID="44fa0a94e5bd598fdbbaee16e5e28873de9d87b1abdb7377c908f08871b158af" Dec 08 22:06:43 crc kubenswrapper[4791]: E1208 22:06:43.609001 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:06:54 crc kubenswrapper[4791]: I1208 22:06:54.598734 4791 scope.go:117] "RemoveContainer" containerID="44fa0a94e5bd598fdbbaee16e5e28873de9d87b1abdb7377c908f08871b158af" Dec 08 22:06:54 crc kubenswrapper[4791]: E1208 22:06:54.599473 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:07:06 crc kubenswrapper[4791]: I1208 22:07:06.598374 4791 scope.go:117] "RemoveContainer" containerID="44fa0a94e5bd598fdbbaee16e5e28873de9d87b1abdb7377c908f08871b158af" Dec 08 22:07:06 crc kubenswrapper[4791]: E1208 22:07:06.599177 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:07:18 crc kubenswrapper[4791]: I1208 22:07:18.597629 4791 scope.go:117] "RemoveContainer" containerID="44fa0a94e5bd598fdbbaee16e5e28873de9d87b1abdb7377c908f08871b158af" Dec 08 22:07:18 crc kubenswrapper[4791]: E1208 22:07:18.598547 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:07:33 crc kubenswrapper[4791]: I1208 22:07:33.607386 4791 scope.go:117] "RemoveContainer" containerID="44fa0a94e5bd598fdbbaee16e5e28873de9d87b1abdb7377c908f08871b158af" Dec 08 22:07:33 crc kubenswrapper[4791]: E1208 22:07:33.608286 
4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:07:35 crc kubenswrapper[4791]: I1208 22:07:35.251173 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 22:07:35 crc kubenswrapper[4791]: I1208 22:07:35.251257 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 22:07:46 crc kubenswrapper[4791]: I1208 22:07:46.601425 4791 scope.go:117] "RemoveContainer" containerID="44fa0a94e5bd598fdbbaee16e5e28873de9d87b1abdb7377c908f08871b158af" Dec 08 22:07:46 crc kubenswrapper[4791]: E1208 22:07:46.603666 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:08:01 crc kubenswrapper[4791]: I1208 22:08:01.597868 4791 scope.go:117] "RemoveContainer" containerID="44fa0a94e5bd598fdbbaee16e5e28873de9d87b1abdb7377c908f08871b158af" Dec 08 22:08:01 crc kubenswrapper[4791]: E1208 22:08:01.598616 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:08:05 crc kubenswrapper[4791]: I1208 22:08:05.251172 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 22:08:05 crc kubenswrapper[4791]: I1208 22:08:05.251675 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 22:08:15 crc kubenswrapper[4791]: I1208 22:08:15.626814 4791 scope.go:117] "RemoveContainer" containerID="44fa0a94e5bd598fdbbaee16e5e28873de9d87b1abdb7377c908f08871b158af" Dec 08 22:08:16 crc kubenswrapper[4791]: I1208 22:08:16.419021 4791 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" event={"ID":"bcd8d669-4a40-401d-af99-651b840fb48b","Type":"ContainerStarted","Data":"7b6c9b2a607849ad375959fc2219a81ee02917eed20de7fb431c4855f9233e1d"} Dec 08 22:08:16 crc kubenswrapper[4791]: I1208 22:08:16.419839 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 22:08:25 crc kubenswrapper[4791]: I1208 22:08:25.888412 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 22:08:35 crc kubenswrapper[4791]: I1208 22:08:35.252039 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 22:08:35 crc kubenswrapper[4791]: I1208 22:08:35.252596 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 22:08:35 crc kubenswrapper[4791]: I1208 22:08:35.252651 4791 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" Dec 08 22:08:35 crc kubenswrapper[4791]: I1208 22:08:35.253857 4791 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d71e943fae01350220f668be28850d267e8c50777be2af922215add5cdf7fee4"} pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 08 22:08:35 crc kubenswrapper[4791]: I1208 22:08:35.253921 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" containerID="cri-o://d71e943fae01350220f668be28850d267e8c50777be2af922215add5cdf7fee4" gracePeriod=600 Dec 08 22:08:35 crc kubenswrapper[4791]: E1208 22:08:35.376428 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:08:35 crc kubenswrapper[4791]: I1208 22:08:35.605426 4791 generic.go:334] "Generic (PLEG): container finished" podID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerID="d71e943fae01350220f668be28850d267e8c50777be2af922215add5cdf7fee4" exitCode=0 Dec 08 22:08:35 crc kubenswrapper[4791]: I1208 22:08:35.611652 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" event={"ID":"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d","Type":"ContainerDied","Data":"d71e943fae01350220f668be28850d267e8c50777be2af922215add5cdf7fee4"} Dec 08 22:08:35 crc 
kubenswrapper[4791]: I1208 22:08:35.611734 4791 scope.go:117] "RemoveContainer" containerID="8b357e5859d4c54accba7d3e26bc4b456e8611ccfcc56e88550c314e55fc6dc4" Dec 08 22:08:35 crc kubenswrapper[4791]: I1208 22:08:35.612935 4791 scope.go:117] "RemoveContainer" containerID="d71e943fae01350220f668be28850d267e8c50777be2af922215add5cdf7fee4" Dec 08 22:08:35 crc kubenswrapper[4791]: E1208 22:08:35.613469 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:08:49 crc kubenswrapper[4791]: I1208 22:08:49.598941 4791 scope.go:117] "RemoveContainer" containerID="d71e943fae01350220f668be28850d267e8c50777be2af922215add5cdf7fee4" Dec 08 22:08:49 crc kubenswrapper[4791]: E1208 22:08:49.599779 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:09:01 crc kubenswrapper[4791]: I1208 22:09:01.600076 4791 scope.go:117] "RemoveContainer" containerID="d71e943fae01350220f668be28850d267e8c50777be2af922215add5cdf7fee4" Dec 08 22:09:01 crc kubenswrapper[4791]: E1208 22:09:01.601150 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:09:16 crc kubenswrapper[4791]: I1208 22:09:16.597697 4791 scope.go:117] "RemoveContainer" containerID="d71e943fae01350220f668be28850d267e8c50777be2af922215add5cdf7fee4" Dec 08 22:09:16 crc kubenswrapper[4791]: E1208 22:09:16.598602 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:09:30 crc kubenswrapper[4791]: I1208 22:09:30.598602 4791 scope.go:117] "RemoveContainer" containerID="d71e943fae01350220f668be28850d267e8c50777be2af922215add5cdf7fee4" Dec 08 22:09:30 crc kubenswrapper[4791]: E1208 22:09:30.599514 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:09:45 crc 
kubenswrapper[4791]: I1208 22:09:45.598327 4791 scope.go:117] "RemoveContainer" containerID="d71e943fae01350220f668be28850d267e8c50777be2af922215add5cdf7fee4" Dec 08 22:09:45 crc kubenswrapper[4791]: E1208 22:09:45.600605 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:09:56 crc kubenswrapper[4791]: I1208 22:09:56.597741 4791 scope.go:117] "RemoveContainer" containerID="d71e943fae01350220f668be28850d267e8c50777be2af922215add5cdf7fee4" Dec 08 22:09:56 crc kubenswrapper[4791]: E1208 22:09:56.598494 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:10:09 crc kubenswrapper[4791]: I1208 22:10:09.598544 4791 scope.go:117] "RemoveContainer" containerID="d71e943fae01350220f668be28850d267e8c50777be2af922215add5cdf7fee4" Dec 08 22:10:09 crc kubenswrapper[4791]: E1208 22:10:09.599256 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:10:24 crc kubenswrapper[4791]: I1208 22:10:24.597878 4791 scope.go:117] "RemoveContainer" containerID="d71e943fae01350220f668be28850d267e8c50777be2af922215add5cdf7fee4" Dec 08 22:10:24 crc kubenswrapper[4791]: E1208 22:10:24.598653 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:10:39 crc kubenswrapper[4791]: I1208 22:10:39.598855 4791 scope.go:117] "RemoveContainer" containerID="d71e943fae01350220f668be28850d267e8c50777be2af922215add5cdf7fee4" Dec 08 22:10:39 crc kubenswrapper[4791]: E1208 22:10:39.599636 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:10:51 crc kubenswrapper[4791]: I1208 22:10:51.598457 4791 scope.go:117] "RemoveContainer" containerID="d71e943fae01350220f668be28850d267e8c50777be2af922215add5cdf7fee4" Dec 08 22:10:51 crc 
kubenswrapper[4791]: E1208 22:10:51.599341 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:10:54 crc kubenswrapper[4791]: I1208 22:10:54.125156 4791 generic.go:334] "Generic (PLEG): container finished" podID="bcd8d669-4a40-401d-af99-651b840fb48b" containerID="7b6c9b2a607849ad375959fc2219a81ee02917eed20de7fb431c4855f9233e1d" exitCode=1 Dec 08 22:10:54 crc kubenswrapper[4791]: I1208 22:10:54.125252 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" event={"ID":"bcd8d669-4a40-401d-af99-651b840fb48b","Type":"ContainerDied","Data":"7b6c9b2a607849ad375959fc2219a81ee02917eed20de7fb431c4855f9233e1d"} Dec 08 22:10:54 crc kubenswrapper[4791]: I1208 22:10:54.125768 4791 scope.go:117] "RemoveContainer" containerID="44fa0a94e5bd598fdbbaee16e5e28873de9d87b1abdb7377c908f08871b158af" Dec 08 22:10:54 crc kubenswrapper[4791]: I1208 22:10:54.126677 4791 scope.go:117] "RemoveContainer" containerID="7b6c9b2a607849ad375959fc2219a81ee02917eed20de7fb431c4855f9233e1d" Dec 08 22:10:54 crc kubenswrapper[4791]: E1208 22:10:54.127113 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:10:55 crc kubenswrapper[4791]: I1208 22:10:55.884789 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 22:10:56 crc kubenswrapper[4791]: I1208 22:10:56.011967 4791 scope.go:117] "RemoveContainer" containerID="7b6c9b2a607849ad375959fc2219a81ee02917eed20de7fb431c4855f9233e1d" Dec 08 22:10:56 crc kubenswrapper[4791]: E1208 22:10:56.023901 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:11:05 crc kubenswrapper[4791]: I1208 22:11:05.598298 4791 scope.go:117] "RemoveContainer" containerID="d71e943fae01350220f668be28850d267e8c50777be2af922215add5cdf7fee4" Dec 08 22:11:05 crc kubenswrapper[4791]: E1208 22:11:05.599212 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:11:05 crc kubenswrapper[4791]: I1208 22:11:05.885304 4791 
kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 22:11:05 crc kubenswrapper[4791]: I1208 22:11:05.886555 4791 scope.go:117] "RemoveContainer" containerID="7b6c9b2a607849ad375959fc2219a81ee02917eed20de7fb431c4855f9233e1d" Dec 08 22:11:05 crc kubenswrapper[4791]: E1208 22:11:05.886936 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:11:19 crc kubenswrapper[4791]: I1208 22:11:19.598096 4791 scope.go:117] "RemoveContainer" containerID="7b6c9b2a607849ad375959fc2219a81ee02917eed20de7fb431c4855f9233e1d" Dec 08 22:11:19 crc kubenswrapper[4791]: E1208 22:11:19.599050 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:11:20 crc kubenswrapper[4791]: I1208 22:11:20.598064 4791 scope.go:117] "RemoveContainer" containerID="d71e943fae01350220f668be28850d267e8c50777be2af922215add5cdf7fee4" Dec 08 22:11:20 crc kubenswrapper[4791]: E1208 22:11:20.598683 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:11:32 crc kubenswrapper[4791]: I1208 22:11:32.598246 4791 scope.go:117] "RemoveContainer" containerID="7b6c9b2a607849ad375959fc2219a81ee02917eed20de7fb431c4855f9233e1d" Dec 08 22:11:32 crc kubenswrapper[4791]: E1208 22:11:32.599211 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:11:33 crc kubenswrapper[4791]: I1208 22:11:33.607858 4791 scope.go:117] "RemoveContainer" containerID="d71e943fae01350220f668be28850d267e8c50777be2af922215add5cdf7fee4" Dec 08 22:11:33 crc kubenswrapper[4791]: E1208 22:11:33.608292 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:11:47 crc kubenswrapper[4791]: I1208 
22:11:47.598651 4791 scope.go:117] "RemoveContainer" containerID="7b6c9b2a607849ad375959fc2219a81ee02917eed20de7fb431c4855f9233e1d" Dec 08 22:11:47 crc kubenswrapper[4791]: E1208 22:11:47.599646 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:11:48 crc kubenswrapper[4791]: I1208 22:11:48.598557 4791 scope.go:117] "RemoveContainer" containerID="d71e943fae01350220f668be28850d267e8c50777be2af922215add5cdf7fee4" Dec 08 22:11:48 crc kubenswrapper[4791]: E1208 22:11:48.598926 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:12:00 crc kubenswrapper[4791]: I1208 22:12:00.598309 4791 scope.go:117] "RemoveContainer" containerID="d71e943fae01350220f668be28850d267e8c50777be2af922215add5cdf7fee4" Dec 08 22:12:00 crc kubenswrapper[4791]: E1208 22:12:00.599319 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:12:02 crc kubenswrapper[4791]: I1208 22:12:02.597836 4791 scope.go:117] "RemoveContainer" containerID="7b6c9b2a607849ad375959fc2219a81ee02917eed20de7fb431c4855f9233e1d" Dec 08 22:12:02 crc kubenswrapper[4791]: E1208 22:12:02.598370 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:12:03 crc kubenswrapper[4791]: I1208 22:12:03.219218 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-g9ttn"] Dec 08 22:12:03 crc kubenswrapper[4791]: E1208 22:12:03.220018 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0d33661-88c6-4b29-8c74-89cd3ea52570" containerName="extract-content" Dec 08 22:12:03 crc kubenswrapper[4791]: I1208 22:12:03.220046 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0d33661-88c6-4b29-8c74-89cd3ea52570" containerName="extract-content" Dec 08 22:12:03 crc kubenswrapper[4791]: E1208 22:12:03.220090 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0d33661-88c6-4b29-8c74-89cd3ea52570" containerName="extract-utilities" Dec 08 22:12:03 crc kubenswrapper[4791]: I1208 22:12:03.220103 4791 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="f0d33661-88c6-4b29-8c74-89cd3ea52570" containerName="extract-utilities" Dec 08 22:12:03 crc kubenswrapper[4791]: E1208 22:12:03.220123 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0d33661-88c6-4b29-8c74-89cd3ea52570" containerName="registry-server" Dec 08 22:12:03 crc kubenswrapper[4791]: I1208 22:12:03.220131 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0d33661-88c6-4b29-8c74-89cd3ea52570" containerName="registry-server" Dec 08 22:12:03 crc kubenswrapper[4791]: I1208 22:12:03.220482 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0d33661-88c6-4b29-8c74-89cd3ea52570" containerName="registry-server" Dec 08 22:12:03 crc kubenswrapper[4791]: I1208 22:12:03.222592 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-g9ttn" Dec 08 22:12:03 crc kubenswrapper[4791]: I1208 22:12:03.229154 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-g9ttn"] Dec 08 22:12:03 crc kubenswrapper[4791]: I1208 22:12:03.289941 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5bd192cd-22c5-4b64-8eef-f58815da1e3f-catalog-content\") pod \"certified-operators-g9ttn\" (UID: \"5bd192cd-22c5-4b64-8eef-f58815da1e3f\") " pod="openshift-marketplace/certified-operators-g9ttn" Dec 08 22:12:03 crc kubenswrapper[4791]: I1208 22:12:03.290290 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8rcj7\" (UniqueName: \"kubernetes.io/projected/5bd192cd-22c5-4b64-8eef-f58815da1e3f-kube-api-access-8rcj7\") pod \"certified-operators-g9ttn\" (UID: \"5bd192cd-22c5-4b64-8eef-f58815da1e3f\") " pod="openshift-marketplace/certified-operators-g9ttn" Dec 08 22:12:03 crc kubenswrapper[4791]: I1208 22:12:03.290320 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5bd192cd-22c5-4b64-8eef-f58815da1e3f-utilities\") pod \"certified-operators-g9ttn\" (UID: \"5bd192cd-22c5-4b64-8eef-f58815da1e3f\") " pod="openshift-marketplace/certified-operators-g9ttn" Dec 08 22:12:03 crc kubenswrapper[4791]: I1208 22:12:03.392810 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5bd192cd-22c5-4b64-8eef-f58815da1e3f-catalog-content\") pod \"certified-operators-g9ttn\" (UID: \"5bd192cd-22c5-4b64-8eef-f58815da1e3f\") " pod="openshift-marketplace/certified-operators-g9ttn" Dec 08 22:12:03 crc kubenswrapper[4791]: I1208 22:12:03.392895 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8rcj7\" (UniqueName: \"kubernetes.io/projected/5bd192cd-22c5-4b64-8eef-f58815da1e3f-kube-api-access-8rcj7\") pod \"certified-operators-g9ttn\" (UID: \"5bd192cd-22c5-4b64-8eef-f58815da1e3f\") " pod="openshift-marketplace/certified-operators-g9ttn" Dec 08 22:12:03 crc kubenswrapper[4791]: I1208 22:12:03.392931 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5bd192cd-22c5-4b64-8eef-f58815da1e3f-utilities\") pod \"certified-operators-g9ttn\" (UID: \"5bd192cd-22c5-4b64-8eef-f58815da1e3f\") " pod="openshift-marketplace/certified-operators-g9ttn" Dec 08 22:12:03 crc kubenswrapper[4791]: I1208 22:12:03.393266 4791 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5bd192cd-22c5-4b64-8eef-f58815da1e3f-catalog-content\") pod \"certified-operators-g9ttn\" (UID: \"5bd192cd-22c5-4b64-8eef-f58815da1e3f\") " pod="openshift-marketplace/certified-operators-g9ttn" Dec 08 22:12:03 crc kubenswrapper[4791]: I1208 22:12:03.393365 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5bd192cd-22c5-4b64-8eef-f58815da1e3f-utilities\") pod \"certified-operators-g9ttn\" (UID: \"5bd192cd-22c5-4b64-8eef-f58815da1e3f\") " pod="openshift-marketplace/certified-operators-g9ttn" Dec 08 22:12:03 crc kubenswrapper[4791]: I1208 22:12:03.454031 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8rcj7\" (UniqueName: \"kubernetes.io/projected/5bd192cd-22c5-4b64-8eef-f58815da1e3f-kube-api-access-8rcj7\") pod \"certified-operators-g9ttn\" (UID: \"5bd192cd-22c5-4b64-8eef-f58815da1e3f\") " pod="openshift-marketplace/certified-operators-g9ttn" Dec 08 22:12:03 crc kubenswrapper[4791]: I1208 22:12:03.551375 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-g9ttn" Dec 08 22:12:04 crc kubenswrapper[4791]: I1208 22:12:04.155648 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-g9ttn"] Dec 08 22:12:05 crc kubenswrapper[4791]: I1208 22:12:05.109448 4791 generic.go:334] "Generic (PLEG): container finished" podID="5bd192cd-22c5-4b64-8eef-f58815da1e3f" containerID="2224f57632d297d1fb52f1346925a24a57ecdc534cbee76da9c35a987bb6489b" exitCode=0 Dec 08 22:12:05 crc kubenswrapper[4791]: I1208 22:12:05.109556 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g9ttn" event={"ID":"5bd192cd-22c5-4b64-8eef-f58815da1e3f","Type":"ContainerDied","Data":"2224f57632d297d1fb52f1346925a24a57ecdc534cbee76da9c35a987bb6489b"} Dec 08 22:12:05 crc kubenswrapper[4791]: I1208 22:12:05.110255 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g9ttn" event={"ID":"5bd192cd-22c5-4b64-8eef-f58815da1e3f","Type":"ContainerStarted","Data":"dcdbc435b4f691296f978f55f24fd4cd4be525c3bdbe86f835403b0badb0ea10"} Dec 08 22:12:05 crc kubenswrapper[4791]: I1208 22:12:05.112882 4791 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 08 22:12:07 crc kubenswrapper[4791]: I1208 22:12:07.137340 4791 generic.go:334] "Generic (PLEG): container finished" podID="5bd192cd-22c5-4b64-8eef-f58815da1e3f" containerID="401efa988c7fb9ffca5dcf36098c23c2bf4080cbd2ffba85228b8b52e1de6c20" exitCode=0 Dec 08 22:12:07 crc kubenswrapper[4791]: I1208 22:12:07.137430 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g9ttn" event={"ID":"5bd192cd-22c5-4b64-8eef-f58815da1e3f","Type":"ContainerDied","Data":"401efa988c7fb9ffca5dcf36098c23c2bf4080cbd2ffba85228b8b52e1de6c20"} Dec 08 22:12:07 crc kubenswrapper[4791]: I1208 22:12:07.596135 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-kbmlf"] Dec 08 22:12:07 crc kubenswrapper[4791]: I1208 22:12:07.600456 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kbmlf" Dec 08 22:12:07 crc kubenswrapper[4791]: I1208 22:12:07.625168 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kbmlf"] Dec 08 22:12:07 crc kubenswrapper[4791]: I1208 22:12:07.731950 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k648w\" (UniqueName: \"kubernetes.io/projected/c386691d-259f-4a02-948b-622b696570c3-kube-api-access-k648w\") pod \"redhat-marketplace-kbmlf\" (UID: \"c386691d-259f-4a02-948b-622b696570c3\") " pod="openshift-marketplace/redhat-marketplace-kbmlf" Dec 08 22:12:07 crc kubenswrapper[4791]: I1208 22:12:07.732429 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c386691d-259f-4a02-948b-622b696570c3-utilities\") pod \"redhat-marketplace-kbmlf\" (UID: \"c386691d-259f-4a02-948b-622b696570c3\") " pod="openshift-marketplace/redhat-marketplace-kbmlf" Dec 08 22:12:07 crc kubenswrapper[4791]: I1208 22:12:07.733071 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c386691d-259f-4a02-948b-622b696570c3-catalog-content\") pod \"redhat-marketplace-kbmlf\" (UID: \"c386691d-259f-4a02-948b-622b696570c3\") " pod="openshift-marketplace/redhat-marketplace-kbmlf" Dec 08 22:12:07 crc kubenswrapper[4791]: I1208 22:12:07.835146 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k648w\" (UniqueName: \"kubernetes.io/projected/c386691d-259f-4a02-948b-622b696570c3-kube-api-access-k648w\") pod \"redhat-marketplace-kbmlf\" (UID: \"c386691d-259f-4a02-948b-622b696570c3\") " pod="openshift-marketplace/redhat-marketplace-kbmlf" Dec 08 22:12:07 crc kubenswrapper[4791]: I1208 22:12:07.835230 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c386691d-259f-4a02-948b-622b696570c3-utilities\") pod \"redhat-marketplace-kbmlf\" (UID: \"c386691d-259f-4a02-948b-622b696570c3\") " pod="openshift-marketplace/redhat-marketplace-kbmlf" Dec 08 22:12:07 crc kubenswrapper[4791]: I1208 22:12:07.835362 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c386691d-259f-4a02-948b-622b696570c3-catalog-content\") pod \"redhat-marketplace-kbmlf\" (UID: \"c386691d-259f-4a02-948b-622b696570c3\") " pod="openshift-marketplace/redhat-marketplace-kbmlf" Dec 08 22:12:07 crc kubenswrapper[4791]: I1208 22:12:07.835921 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c386691d-259f-4a02-948b-622b696570c3-catalog-content\") pod \"redhat-marketplace-kbmlf\" (UID: \"c386691d-259f-4a02-948b-622b696570c3\") " pod="openshift-marketplace/redhat-marketplace-kbmlf" Dec 08 22:12:07 crc kubenswrapper[4791]: I1208 22:12:07.836019 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c386691d-259f-4a02-948b-622b696570c3-utilities\") pod \"redhat-marketplace-kbmlf\" (UID: \"c386691d-259f-4a02-948b-622b696570c3\") " pod="openshift-marketplace/redhat-marketplace-kbmlf" Dec 08 22:12:07 crc kubenswrapper[4791]: I1208 22:12:07.859482 4791 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-k648w\" (UniqueName: \"kubernetes.io/projected/c386691d-259f-4a02-948b-622b696570c3-kube-api-access-k648w\") pod \"redhat-marketplace-kbmlf\" (UID: \"c386691d-259f-4a02-948b-622b696570c3\") " pod="openshift-marketplace/redhat-marketplace-kbmlf" Dec 08 22:12:07 crc kubenswrapper[4791]: I1208 22:12:07.940741 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kbmlf" Dec 08 22:12:08 crc kubenswrapper[4791]: I1208 22:12:08.158033 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g9ttn" event={"ID":"5bd192cd-22c5-4b64-8eef-f58815da1e3f","Type":"ContainerStarted","Data":"2b7f0b98d1523b2bd97cea9229c989d96d05afba22c1905b9df03a5c581aa027"} Dec 08 22:12:08 crc kubenswrapper[4791]: I1208 22:12:08.193484 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-g9ttn" podStartSLOduration=2.720093595 podStartE2EDuration="5.193462422s" podCreationTimestamp="2025-12-08 22:12:03 +0000 UTC" firstStartedPulling="2025-12-08 22:12:05.112609281 +0000 UTC m=+3201.811367636" lastFinishedPulling="2025-12-08 22:12:07.585978118 +0000 UTC m=+3204.284736463" observedRunningTime="2025-12-08 22:12:08.177973726 +0000 UTC m=+3204.876732071" watchObservedRunningTime="2025-12-08 22:12:08.193462422 +0000 UTC m=+3204.892220767" Dec 08 22:12:08 crc kubenswrapper[4791]: I1208 22:12:08.538031 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kbmlf"] Dec 08 22:12:09 crc kubenswrapper[4791]: I1208 22:12:09.171624 4791 generic.go:334] "Generic (PLEG): container finished" podID="c386691d-259f-4a02-948b-622b696570c3" containerID="c596cba0bab43af4ff6e7c15d2b71bba837be21bbc35cf5eeee7d46998a6f481" exitCode=0 Dec 08 22:12:09 crc kubenswrapper[4791]: I1208 22:12:09.171780 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kbmlf" event={"ID":"c386691d-259f-4a02-948b-622b696570c3","Type":"ContainerDied","Data":"c596cba0bab43af4ff6e7c15d2b71bba837be21bbc35cf5eeee7d46998a6f481"} Dec 08 22:12:09 crc kubenswrapper[4791]: I1208 22:12:09.172907 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kbmlf" event={"ID":"c386691d-259f-4a02-948b-622b696570c3","Type":"ContainerStarted","Data":"bcdc09338c396b18e7fd90f5206877a21f111f3a80ddeec1e256d77b180d7035"} Dec 08 22:12:10 crc kubenswrapper[4791]: I1208 22:12:10.184506 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kbmlf" event={"ID":"c386691d-259f-4a02-948b-622b696570c3","Type":"ContainerStarted","Data":"5d5dc684cc0e07fa3a15255b199271fd8b49c1379efcf8f8e95f084da0127a02"} Dec 08 22:12:12 crc kubenswrapper[4791]: I1208 22:12:12.207206 4791 generic.go:334] "Generic (PLEG): container finished" podID="c386691d-259f-4a02-948b-622b696570c3" containerID="5d5dc684cc0e07fa3a15255b199271fd8b49c1379efcf8f8e95f084da0127a02" exitCode=0 Dec 08 22:12:12 crc kubenswrapper[4791]: I1208 22:12:12.207318 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kbmlf" event={"ID":"c386691d-259f-4a02-948b-622b696570c3","Type":"ContainerDied","Data":"5d5dc684cc0e07fa3a15255b199271fd8b49c1379efcf8f8e95f084da0127a02"} Dec 08 22:12:13 crc kubenswrapper[4791]: I1208 22:12:13.219303 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-kbmlf" event={"ID":"c386691d-259f-4a02-948b-622b696570c3","Type":"ContainerStarted","Data":"6b69137c063d44de29670391b2bbf2fad5b75246fca8f6d5a69c8eab4d9bf780"} Dec 08 22:12:13 crc kubenswrapper[4791]: I1208 22:12:13.254579 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-kbmlf" podStartSLOduration=2.606360461 podStartE2EDuration="6.254562535s" podCreationTimestamp="2025-12-08 22:12:07 +0000 UTC" firstStartedPulling="2025-12-08 22:12:09.173954578 +0000 UTC m=+3205.872712913" lastFinishedPulling="2025-12-08 22:12:12.822156642 +0000 UTC m=+3209.520914987" observedRunningTime="2025-12-08 22:12:13.249063948 +0000 UTC m=+3209.947822293" watchObservedRunningTime="2025-12-08 22:12:13.254562535 +0000 UTC m=+3209.953320870" Dec 08 22:12:13 crc kubenswrapper[4791]: I1208 22:12:13.552291 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-g9ttn" Dec 08 22:12:13 crc kubenswrapper[4791]: I1208 22:12:13.552440 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-g9ttn" Dec 08 22:12:13 crc kubenswrapper[4791]: I1208 22:12:13.604948 4791 scope.go:117] "RemoveContainer" containerID="d71e943fae01350220f668be28850d267e8c50777be2af922215add5cdf7fee4" Dec 08 22:12:13 crc kubenswrapper[4791]: E1208 22:12:13.605486 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:12:13 crc kubenswrapper[4791]: I1208 22:12:13.612478 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-g9ttn" Dec 08 22:12:14 crc kubenswrapper[4791]: I1208 22:12:14.293982 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-g9ttn" Dec 08 22:12:15 crc kubenswrapper[4791]: I1208 22:12:15.598238 4791 scope.go:117] "RemoveContainer" containerID="7b6c9b2a607849ad375959fc2219a81ee02917eed20de7fb431c4855f9233e1d" Dec 08 22:12:15 crc kubenswrapper[4791]: E1208 22:12:15.598839 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:12:15 crc kubenswrapper[4791]: I1208 22:12:15.786190 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-g9ttn"] Dec 08 22:12:17 crc kubenswrapper[4791]: I1208 22:12:17.260455 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-g9ttn" podUID="5bd192cd-22c5-4b64-8eef-f58815da1e3f" containerName="registry-server" containerID="cri-o://2b7f0b98d1523b2bd97cea9229c989d96d05afba22c1905b9df03a5c581aa027" gracePeriod=2 Dec 08 22:12:17 crc kubenswrapper[4791]: I1208 22:12:17.809157 4791 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-g9ttn" Dec 08 22:12:17 crc kubenswrapper[4791]: I1208 22:12:17.941981 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-kbmlf" Dec 08 22:12:17 crc kubenswrapper[4791]: I1208 22:12:17.942696 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-kbmlf" Dec 08 22:12:18 crc kubenswrapper[4791]: I1208 22:12:18.005822 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8rcj7\" (UniqueName: \"kubernetes.io/projected/5bd192cd-22c5-4b64-8eef-f58815da1e3f-kube-api-access-8rcj7\") pod \"5bd192cd-22c5-4b64-8eef-f58815da1e3f\" (UID: \"5bd192cd-22c5-4b64-8eef-f58815da1e3f\") " Dec 08 22:12:18 crc kubenswrapper[4791]: I1208 22:12:18.006426 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5bd192cd-22c5-4b64-8eef-f58815da1e3f-catalog-content\") pod \"5bd192cd-22c5-4b64-8eef-f58815da1e3f\" (UID: \"5bd192cd-22c5-4b64-8eef-f58815da1e3f\") " Dec 08 22:12:18 crc kubenswrapper[4791]: I1208 22:12:18.006556 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5bd192cd-22c5-4b64-8eef-f58815da1e3f-utilities\") pod \"5bd192cd-22c5-4b64-8eef-f58815da1e3f\" (UID: \"5bd192cd-22c5-4b64-8eef-f58815da1e3f\") " Dec 08 22:12:18 crc kubenswrapper[4791]: I1208 22:12:18.007896 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-kbmlf" Dec 08 22:12:18 crc kubenswrapper[4791]: I1208 22:12:18.007906 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5bd192cd-22c5-4b64-8eef-f58815da1e3f-utilities" (OuterVolumeSpecName: "utilities") pod "5bd192cd-22c5-4b64-8eef-f58815da1e3f" (UID: "5bd192cd-22c5-4b64-8eef-f58815da1e3f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:12:18 crc kubenswrapper[4791]: I1208 22:12:18.065015 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5bd192cd-22c5-4b64-8eef-f58815da1e3f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5bd192cd-22c5-4b64-8eef-f58815da1e3f" (UID: "5bd192cd-22c5-4b64-8eef-f58815da1e3f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:12:18 crc kubenswrapper[4791]: I1208 22:12:18.638235 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5bd192cd-22c5-4b64-8eef-f58815da1e3f-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 22:12:18 crc kubenswrapper[4791]: I1208 22:12:18.680649 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5bd192cd-22c5-4b64-8eef-f58815da1e3f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 22:12:18 crc kubenswrapper[4791]: I1208 22:12:18.674679 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5bd192cd-22c5-4b64-8eef-f58815da1e3f-kube-api-access-8rcj7" (OuterVolumeSpecName: "kube-api-access-8rcj7") pod "5bd192cd-22c5-4b64-8eef-f58815da1e3f" (UID: "5bd192cd-22c5-4b64-8eef-f58815da1e3f"). 
InnerVolumeSpecName "kube-api-access-8rcj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 22:12:18 crc kubenswrapper[4791]: I1208 22:12:18.690471 4791 generic.go:334] "Generic (PLEG): container finished" podID="5bd192cd-22c5-4b64-8eef-f58815da1e3f" containerID="2b7f0b98d1523b2bd97cea9229c989d96d05afba22c1905b9df03a5c581aa027" exitCode=0 Dec 08 22:12:18 crc kubenswrapper[4791]: I1208 22:12:18.694561 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-g9ttn" Dec 08 22:12:18 crc kubenswrapper[4791]: I1208 22:12:18.700182 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g9ttn" event={"ID":"5bd192cd-22c5-4b64-8eef-f58815da1e3f","Type":"ContainerDied","Data":"2b7f0b98d1523b2bd97cea9229c989d96d05afba22c1905b9df03a5c581aa027"} Dec 08 22:12:18 crc kubenswrapper[4791]: I1208 22:12:18.700453 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g9ttn" event={"ID":"5bd192cd-22c5-4b64-8eef-f58815da1e3f","Type":"ContainerDied","Data":"dcdbc435b4f691296f978f55f24fd4cd4be525c3bdbe86f835403b0badb0ea10"} Dec 08 22:12:18 crc kubenswrapper[4791]: I1208 22:12:18.700567 4791 scope.go:117] "RemoveContainer" containerID="2b7f0b98d1523b2bd97cea9229c989d96d05afba22c1905b9df03a5c581aa027" Dec 08 22:12:18 crc kubenswrapper[4791]: E1208 22:12:18.707449 4791 kuberuntime_gc.go:389] "Failed to remove container log dead symlink" err="remove /var/log/containers/certified-operators-g9ttn_openshift-marketplace_registry-server-2b7f0b98d1523b2bd97cea9229c989d96d05afba22c1905b9df03a5c581aa027.log: no such file or directory" path="/var/log/containers/certified-operators-g9ttn_openshift-marketplace_registry-server-2b7f0b98d1523b2bd97cea9229c989d96d05afba22c1905b9df03a5c581aa027.log" Dec 08 22:12:18 crc kubenswrapper[4791]: I1208 22:12:18.758239 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-g9ttn"] Dec 08 22:12:18 crc kubenswrapper[4791]: I1208 22:12:18.769143 4791 scope.go:117] "RemoveContainer" containerID="401efa988c7fb9ffca5dcf36098c23c2bf4080cbd2ffba85228b8b52e1de6c20" Dec 08 22:12:18 crc kubenswrapper[4791]: I1208 22:12:18.775576 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-g9ttn"] Dec 08 22:12:18 crc kubenswrapper[4791]: I1208 22:12:18.783269 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-kbmlf" Dec 08 22:12:18 crc kubenswrapper[4791]: I1208 22:12:18.785668 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8rcj7\" (UniqueName: \"kubernetes.io/projected/5bd192cd-22c5-4b64-8eef-f58815da1e3f-kube-api-access-8rcj7\") on node \"crc\" DevicePath \"\"" Dec 08 22:12:18 crc kubenswrapper[4791]: I1208 22:12:18.819915 4791 scope.go:117] "RemoveContainer" containerID="2224f57632d297d1fb52f1346925a24a57ecdc534cbee76da9c35a987bb6489b" Dec 08 22:12:18 crc kubenswrapper[4791]: I1208 22:12:18.871136 4791 scope.go:117] "RemoveContainer" containerID="2b7f0b98d1523b2bd97cea9229c989d96d05afba22c1905b9df03a5c581aa027" Dec 08 22:12:18 crc kubenswrapper[4791]: E1208 22:12:18.871748 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2b7f0b98d1523b2bd97cea9229c989d96d05afba22c1905b9df03a5c581aa027\": container with ID starting with 
2b7f0b98d1523b2bd97cea9229c989d96d05afba22c1905b9df03a5c581aa027 not found: ID does not exist" containerID="2b7f0b98d1523b2bd97cea9229c989d96d05afba22c1905b9df03a5c581aa027" Dec 08 22:12:18 crc kubenswrapper[4791]: I1208 22:12:18.871794 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b7f0b98d1523b2bd97cea9229c989d96d05afba22c1905b9df03a5c581aa027"} err="failed to get container status \"2b7f0b98d1523b2bd97cea9229c989d96d05afba22c1905b9df03a5c581aa027\": rpc error: code = NotFound desc = could not find container \"2b7f0b98d1523b2bd97cea9229c989d96d05afba22c1905b9df03a5c581aa027\": container with ID starting with 2b7f0b98d1523b2bd97cea9229c989d96d05afba22c1905b9df03a5c581aa027 not found: ID does not exist" Dec 08 22:12:18 crc kubenswrapper[4791]: I1208 22:12:18.871823 4791 scope.go:117] "RemoveContainer" containerID="401efa988c7fb9ffca5dcf36098c23c2bf4080cbd2ffba85228b8b52e1de6c20" Dec 08 22:12:18 crc kubenswrapper[4791]: E1208 22:12:18.872301 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"401efa988c7fb9ffca5dcf36098c23c2bf4080cbd2ffba85228b8b52e1de6c20\": container with ID starting with 401efa988c7fb9ffca5dcf36098c23c2bf4080cbd2ffba85228b8b52e1de6c20 not found: ID does not exist" containerID="401efa988c7fb9ffca5dcf36098c23c2bf4080cbd2ffba85228b8b52e1de6c20" Dec 08 22:12:18 crc kubenswrapper[4791]: I1208 22:12:18.872337 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"401efa988c7fb9ffca5dcf36098c23c2bf4080cbd2ffba85228b8b52e1de6c20"} err="failed to get container status \"401efa988c7fb9ffca5dcf36098c23c2bf4080cbd2ffba85228b8b52e1de6c20\": rpc error: code = NotFound desc = could not find container \"401efa988c7fb9ffca5dcf36098c23c2bf4080cbd2ffba85228b8b52e1de6c20\": container with ID starting with 401efa988c7fb9ffca5dcf36098c23c2bf4080cbd2ffba85228b8b52e1de6c20 not found: ID does not exist" Dec 08 22:12:18 crc kubenswrapper[4791]: I1208 22:12:18.872365 4791 scope.go:117] "RemoveContainer" containerID="2224f57632d297d1fb52f1346925a24a57ecdc534cbee76da9c35a987bb6489b" Dec 08 22:12:18 crc kubenswrapper[4791]: E1208 22:12:18.872800 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2224f57632d297d1fb52f1346925a24a57ecdc534cbee76da9c35a987bb6489b\": container with ID starting with 2224f57632d297d1fb52f1346925a24a57ecdc534cbee76da9c35a987bb6489b not found: ID does not exist" containerID="2224f57632d297d1fb52f1346925a24a57ecdc534cbee76da9c35a987bb6489b" Dec 08 22:12:18 crc kubenswrapper[4791]: I1208 22:12:18.872834 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2224f57632d297d1fb52f1346925a24a57ecdc534cbee76da9c35a987bb6489b"} err="failed to get container status \"2224f57632d297d1fb52f1346925a24a57ecdc534cbee76da9c35a987bb6489b\": rpc error: code = NotFound desc = could not find container \"2224f57632d297d1fb52f1346925a24a57ecdc534cbee76da9c35a987bb6489b\": container with ID starting with 2224f57632d297d1fb52f1346925a24a57ecdc534cbee76da9c35a987bb6489b not found: ID does not exist" Dec 08 22:12:19 crc kubenswrapper[4791]: I1208 22:12:19.586014 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kbmlf"] Dec 08 22:12:19 crc kubenswrapper[4791]: I1208 22:12:19.616081 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="5bd192cd-22c5-4b64-8eef-f58815da1e3f" path="/var/lib/kubelet/pods/5bd192cd-22c5-4b64-8eef-f58815da1e3f/volumes" Dec 08 22:12:20 crc kubenswrapper[4791]: I1208 22:12:20.711207 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-kbmlf" podUID="c386691d-259f-4a02-948b-622b696570c3" containerName="registry-server" containerID="cri-o://6b69137c063d44de29670391b2bbf2fad5b75246fca8f6d5a69c8eab4d9bf780" gracePeriod=2 Dec 08 22:12:21 crc kubenswrapper[4791]: I1208 22:12:21.272556 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kbmlf" Dec 08 22:12:21 crc kubenswrapper[4791]: I1208 22:12:21.349019 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k648w\" (UniqueName: \"kubernetes.io/projected/c386691d-259f-4a02-948b-622b696570c3-kube-api-access-k648w\") pod \"c386691d-259f-4a02-948b-622b696570c3\" (UID: \"c386691d-259f-4a02-948b-622b696570c3\") " Dec 08 22:12:21 crc kubenswrapper[4791]: I1208 22:12:21.349217 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c386691d-259f-4a02-948b-622b696570c3-utilities\") pod \"c386691d-259f-4a02-948b-622b696570c3\" (UID: \"c386691d-259f-4a02-948b-622b696570c3\") " Dec 08 22:12:21 crc kubenswrapper[4791]: I1208 22:12:21.349329 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c386691d-259f-4a02-948b-622b696570c3-catalog-content\") pod \"c386691d-259f-4a02-948b-622b696570c3\" (UID: \"c386691d-259f-4a02-948b-622b696570c3\") " Dec 08 22:12:21 crc kubenswrapper[4791]: I1208 22:12:21.350557 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c386691d-259f-4a02-948b-622b696570c3-utilities" (OuterVolumeSpecName: "utilities") pod "c386691d-259f-4a02-948b-622b696570c3" (UID: "c386691d-259f-4a02-948b-622b696570c3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:12:21 crc kubenswrapper[4791]: I1208 22:12:21.356287 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c386691d-259f-4a02-948b-622b696570c3-kube-api-access-k648w" (OuterVolumeSpecName: "kube-api-access-k648w") pod "c386691d-259f-4a02-948b-622b696570c3" (UID: "c386691d-259f-4a02-948b-622b696570c3"). InnerVolumeSpecName "kube-api-access-k648w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 22:12:21 crc kubenswrapper[4791]: I1208 22:12:21.372146 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c386691d-259f-4a02-948b-622b696570c3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c386691d-259f-4a02-948b-622b696570c3" (UID: "c386691d-259f-4a02-948b-622b696570c3"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:12:21 crc kubenswrapper[4791]: I1208 22:12:21.451577 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c386691d-259f-4a02-948b-622b696570c3-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 22:12:21 crc kubenswrapper[4791]: I1208 22:12:21.451861 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c386691d-259f-4a02-948b-622b696570c3-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 22:12:21 crc kubenswrapper[4791]: I1208 22:12:21.451876 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k648w\" (UniqueName: \"kubernetes.io/projected/c386691d-259f-4a02-948b-622b696570c3-kube-api-access-k648w\") on node \"crc\" DevicePath \"\"" Dec 08 22:12:21 crc kubenswrapper[4791]: I1208 22:12:21.725543 4791 generic.go:334] "Generic (PLEG): container finished" podID="c386691d-259f-4a02-948b-622b696570c3" containerID="6b69137c063d44de29670391b2bbf2fad5b75246fca8f6d5a69c8eab4d9bf780" exitCode=0 Dec 08 22:12:21 crc kubenswrapper[4791]: I1208 22:12:21.725584 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kbmlf" event={"ID":"c386691d-259f-4a02-948b-622b696570c3","Type":"ContainerDied","Data":"6b69137c063d44de29670391b2bbf2fad5b75246fca8f6d5a69c8eab4d9bf780"} Dec 08 22:12:21 crc kubenswrapper[4791]: I1208 22:12:21.725610 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kbmlf" event={"ID":"c386691d-259f-4a02-948b-622b696570c3","Type":"ContainerDied","Data":"bcdc09338c396b18e7fd90f5206877a21f111f3a80ddeec1e256d77b180d7035"} Dec 08 22:12:21 crc kubenswrapper[4791]: I1208 22:12:21.725626 4791 scope.go:117] "RemoveContainer" containerID="6b69137c063d44de29670391b2bbf2fad5b75246fca8f6d5a69c8eab4d9bf780" Dec 08 22:12:21 crc kubenswrapper[4791]: I1208 22:12:21.725762 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kbmlf" Dec 08 22:12:21 crc kubenswrapper[4791]: I1208 22:12:21.753733 4791 scope.go:117] "RemoveContainer" containerID="5d5dc684cc0e07fa3a15255b199271fd8b49c1379efcf8f8e95f084da0127a02" Dec 08 22:12:21 crc kubenswrapper[4791]: I1208 22:12:21.757814 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kbmlf"] Dec 08 22:12:21 crc kubenswrapper[4791]: I1208 22:12:21.777041 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-kbmlf"] Dec 08 22:12:21 crc kubenswrapper[4791]: I1208 22:12:21.780250 4791 scope.go:117] "RemoveContainer" containerID="c596cba0bab43af4ff6e7c15d2b71bba837be21bbc35cf5eeee7d46998a6f481" Dec 08 22:12:21 crc kubenswrapper[4791]: I1208 22:12:21.832292 4791 scope.go:117] "RemoveContainer" containerID="6b69137c063d44de29670391b2bbf2fad5b75246fca8f6d5a69c8eab4d9bf780" Dec 08 22:12:21 crc kubenswrapper[4791]: E1208 22:12:21.832671 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b69137c063d44de29670391b2bbf2fad5b75246fca8f6d5a69c8eab4d9bf780\": container with ID starting with 6b69137c063d44de29670391b2bbf2fad5b75246fca8f6d5a69c8eab4d9bf780 not found: ID does not exist" containerID="6b69137c063d44de29670391b2bbf2fad5b75246fca8f6d5a69c8eab4d9bf780" Dec 08 22:12:21 crc kubenswrapper[4791]: I1208 22:12:21.832721 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b69137c063d44de29670391b2bbf2fad5b75246fca8f6d5a69c8eab4d9bf780"} err="failed to get container status \"6b69137c063d44de29670391b2bbf2fad5b75246fca8f6d5a69c8eab4d9bf780\": rpc error: code = NotFound desc = could not find container \"6b69137c063d44de29670391b2bbf2fad5b75246fca8f6d5a69c8eab4d9bf780\": container with ID starting with 6b69137c063d44de29670391b2bbf2fad5b75246fca8f6d5a69c8eab4d9bf780 not found: ID does not exist" Dec 08 22:12:21 crc kubenswrapper[4791]: I1208 22:12:21.832752 4791 scope.go:117] "RemoveContainer" containerID="5d5dc684cc0e07fa3a15255b199271fd8b49c1379efcf8f8e95f084da0127a02" Dec 08 22:12:21 crc kubenswrapper[4791]: E1208 22:12:21.832996 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5d5dc684cc0e07fa3a15255b199271fd8b49c1379efcf8f8e95f084da0127a02\": container with ID starting with 5d5dc684cc0e07fa3a15255b199271fd8b49c1379efcf8f8e95f084da0127a02 not found: ID does not exist" containerID="5d5dc684cc0e07fa3a15255b199271fd8b49c1379efcf8f8e95f084da0127a02" Dec 08 22:12:21 crc kubenswrapper[4791]: I1208 22:12:21.833017 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d5dc684cc0e07fa3a15255b199271fd8b49c1379efcf8f8e95f084da0127a02"} err="failed to get container status \"5d5dc684cc0e07fa3a15255b199271fd8b49c1379efcf8f8e95f084da0127a02\": rpc error: code = NotFound desc = could not find container \"5d5dc684cc0e07fa3a15255b199271fd8b49c1379efcf8f8e95f084da0127a02\": container with ID starting with 5d5dc684cc0e07fa3a15255b199271fd8b49c1379efcf8f8e95f084da0127a02 not found: ID does not exist" Dec 08 22:12:21 crc kubenswrapper[4791]: I1208 22:12:21.833033 4791 scope.go:117] "RemoveContainer" containerID="c596cba0bab43af4ff6e7c15d2b71bba837be21bbc35cf5eeee7d46998a6f481" Dec 08 22:12:21 crc kubenswrapper[4791]: E1208 22:12:21.833423 4791 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"c596cba0bab43af4ff6e7c15d2b71bba837be21bbc35cf5eeee7d46998a6f481\": container with ID starting with c596cba0bab43af4ff6e7c15d2b71bba837be21bbc35cf5eeee7d46998a6f481 not found: ID does not exist" containerID="c596cba0bab43af4ff6e7c15d2b71bba837be21bbc35cf5eeee7d46998a6f481" Dec 08 22:12:21 crc kubenswrapper[4791]: I1208 22:12:21.833493 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c596cba0bab43af4ff6e7c15d2b71bba837be21bbc35cf5eeee7d46998a6f481"} err="failed to get container status \"c596cba0bab43af4ff6e7c15d2b71bba837be21bbc35cf5eeee7d46998a6f481\": rpc error: code = NotFound desc = could not find container \"c596cba0bab43af4ff6e7c15d2b71bba837be21bbc35cf5eeee7d46998a6f481\": container with ID starting with c596cba0bab43af4ff6e7c15d2b71bba837be21bbc35cf5eeee7d46998a6f481 not found: ID does not exist" Dec 08 22:12:23 crc kubenswrapper[4791]: I1208 22:12:23.610969 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c386691d-259f-4a02-948b-622b696570c3" path="/var/lib/kubelet/pods/c386691d-259f-4a02-948b-622b696570c3/volumes" Dec 08 22:12:24 crc kubenswrapper[4791]: I1208 22:12:24.598856 4791 scope.go:117] "RemoveContainer" containerID="d71e943fae01350220f668be28850d267e8c50777be2af922215add5cdf7fee4" Dec 08 22:12:24 crc kubenswrapper[4791]: E1208 22:12:24.599434 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:12:30 crc kubenswrapper[4791]: I1208 22:12:30.599516 4791 scope.go:117] "RemoveContainer" containerID="7b6c9b2a607849ad375959fc2219a81ee02917eed20de7fb431c4855f9233e1d" Dec 08 22:12:30 crc kubenswrapper[4791]: E1208 22:12:30.600295 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:12:35 crc kubenswrapper[4791]: I1208 22:12:35.602062 4791 scope.go:117] "RemoveContainer" containerID="d71e943fae01350220f668be28850d267e8c50777be2af922215add5cdf7fee4" Dec 08 22:12:35 crc kubenswrapper[4791]: E1208 22:12:35.602854 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:12:39 crc kubenswrapper[4791]: I1208 22:12:39.711649 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-fkf8w"] Dec 08 22:12:39 crc kubenswrapper[4791]: E1208 22:12:39.712856 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c386691d-259f-4a02-948b-622b696570c3" 
containerName="registry-server" Dec 08 22:12:39 crc kubenswrapper[4791]: I1208 22:12:39.712874 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="c386691d-259f-4a02-948b-622b696570c3" containerName="registry-server" Dec 08 22:12:39 crc kubenswrapper[4791]: E1208 22:12:39.712896 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5bd192cd-22c5-4b64-8eef-f58815da1e3f" containerName="extract-utilities" Dec 08 22:12:39 crc kubenswrapper[4791]: I1208 22:12:39.712905 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="5bd192cd-22c5-4b64-8eef-f58815da1e3f" containerName="extract-utilities" Dec 08 22:12:39 crc kubenswrapper[4791]: E1208 22:12:39.712922 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c386691d-259f-4a02-948b-622b696570c3" containerName="extract-content" Dec 08 22:12:39 crc kubenswrapper[4791]: I1208 22:12:39.712929 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="c386691d-259f-4a02-948b-622b696570c3" containerName="extract-content" Dec 08 22:12:39 crc kubenswrapper[4791]: E1208 22:12:39.712945 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5bd192cd-22c5-4b64-8eef-f58815da1e3f" containerName="extract-content" Dec 08 22:12:39 crc kubenswrapper[4791]: I1208 22:12:39.712952 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="5bd192cd-22c5-4b64-8eef-f58815da1e3f" containerName="extract-content" Dec 08 22:12:39 crc kubenswrapper[4791]: E1208 22:12:39.712965 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5bd192cd-22c5-4b64-8eef-f58815da1e3f" containerName="registry-server" Dec 08 22:12:39 crc kubenswrapper[4791]: I1208 22:12:39.712970 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="5bd192cd-22c5-4b64-8eef-f58815da1e3f" containerName="registry-server" Dec 08 22:12:39 crc kubenswrapper[4791]: E1208 22:12:39.712983 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c386691d-259f-4a02-948b-622b696570c3" containerName="extract-utilities" Dec 08 22:12:39 crc kubenswrapper[4791]: I1208 22:12:39.712988 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="c386691d-259f-4a02-948b-622b696570c3" containerName="extract-utilities" Dec 08 22:12:39 crc kubenswrapper[4791]: I1208 22:12:39.713239 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="5bd192cd-22c5-4b64-8eef-f58815da1e3f" containerName="registry-server" Dec 08 22:12:39 crc kubenswrapper[4791]: I1208 22:12:39.713280 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="c386691d-259f-4a02-948b-622b696570c3" containerName="registry-server" Dec 08 22:12:39 crc kubenswrapper[4791]: I1208 22:12:39.725793 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fkf8w"] Dec 08 22:12:39 crc kubenswrapper[4791]: I1208 22:12:39.725914 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-fkf8w" Dec 08 22:12:39 crc kubenswrapper[4791]: I1208 22:12:39.833878 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wvvjc\" (UniqueName: \"kubernetes.io/projected/9fe07170-4c57-4348-a953-667497743fd6-kube-api-access-wvvjc\") pod \"redhat-operators-fkf8w\" (UID: \"9fe07170-4c57-4348-a953-667497743fd6\") " pod="openshift-marketplace/redhat-operators-fkf8w" Dec 08 22:12:39 crc kubenswrapper[4791]: I1208 22:12:39.834250 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9fe07170-4c57-4348-a953-667497743fd6-catalog-content\") pod \"redhat-operators-fkf8w\" (UID: \"9fe07170-4c57-4348-a953-667497743fd6\") " pod="openshift-marketplace/redhat-operators-fkf8w" Dec 08 22:12:39 crc kubenswrapper[4791]: I1208 22:12:39.834524 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9fe07170-4c57-4348-a953-667497743fd6-utilities\") pod \"redhat-operators-fkf8w\" (UID: \"9fe07170-4c57-4348-a953-667497743fd6\") " pod="openshift-marketplace/redhat-operators-fkf8w" Dec 08 22:12:39 crc kubenswrapper[4791]: I1208 22:12:39.936568 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wvvjc\" (UniqueName: \"kubernetes.io/projected/9fe07170-4c57-4348-a953-667497743fd6-kube-api-access-wvvjc\") pod \"redhat-operators-fkf8w\" (UID: \"9fe07170-4c57-4348-a953-667497743fd6\") " pod="openshift-marketplace/redhat-operators-fkf8w" Dec 08 22:12:39 crc kubenswrapper[4791]: I1208 22:12:39.936694 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9fe07170-4c57-4348-a953-667497743fd6-catalog-content\") pod \"redhat-operators-fkf8w\" (UID: \"9fe07170-4c57-4348-a953-667497743fd6\") " pod="openshift-marketplace/redhat-operators-fkf8w" Dec 08 22:12:39 crc kubenswrapper[4791]: I1208 22:12:39.936787 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9fe07170-4c57-4348-a953-667497743fd6-utilities\") pod \"redhat-operators-fkf8w\" (UID: \"9fe07170-4c57-4348-a953-667497743fd6\") " pod="openshift-marketplace/redhat-operators-fkf8w" Dec 08 22:12:39 crc kubenswrapper[4791]: I1208 22:12:39.937401 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9fe07170-4c57-4348-a953-667497743fd6-catalog-content\") pod \"redhat-operators-fkf8w\" (UID: \"9fe07170-4c57-4348-a953-667497743fd6\") " pod="openshift-marketplace/redhat-operators-fkf8w" Dec 08 22:12:39 crc kubenswrapper[4791]: I1208 22:12:39.937435 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9fe07170-4c57-4348-a953-667497743fd6-utilities\") pod \"redhat-operators-fkf8w\" (UID: \"9fe07170-4c57-4348-a953-667497743fd6\") " pod="openshift-marketplace/redhat-operators-fkf8w" Dec 08 22:12:39 crc kubenswrapper[4791]: I1208 22:12:39.961601 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wvvjc\" (UniqueName: \"kubernetes.io/projected/9fe07170-4c57-4348-a953-667497743fd6-kube-api-access-wvvjc\") pod \"redhat-operators-fkf8w\" (UID: 
\"9fe07170-4c57-4348-a953-667497743fd6\") " pod="openshift-marketplace/redhat-operators-fkf8w" Dec 08 22:12:40 crc kubenswrapper[4791]: I1208 22:12:40.049702 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fkf8w" Dec 08 22:12:40 crc kubenswrapper[4791]: I1208 22:12:40.531696 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fkf8w"] Dec 08 22:12:40 crc kubenswrapper[4791]: I1208 22:12:40.930881 4791 generic.go:334] "Generic (PLEG): container finished" podID="9fe07170-4c57-4348-a953-667497743fd6" containerID="4acb7a9260809b147e38829d8bc7d4837cab6a58b5682b5eb6865f50639ff83d" exitCode=0 Dec 08 22:12:40 crc kubenswrapper[4791]: I1208 22:12:40.930924 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fkf8w" event={"ID":"9fe07170-4c57-4348-a953-667497743fd6","Type":"ContainerDied","Data":"4acb7a9260809b147e38829d8bc7d4837cab6a58b5682b5eb6865f50639ff83d"} Dec 08 22:12:40 crc kubenswrapper[4791]: I1208 22:12:40.931163 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fkf8w" event={"ID":"9fe07170-4c57-4348-a953-667497743fd6","Type":"ContainerStarted","Data":"7167e5521ceba80ec4a2b3afa32cd625ac756be01d7fb789fe6a371637e6a24f"} Dec 08 22:12:41 crc kubenswrapper[4791]: I1208 22:12:41.602287 4791 scope.go:117] "RemoveContainer" containerID="7b6c9b2a607849ad375959fc2219a81ee02917eed20de7fb431c4855f9233e1d" Dec 08 22:12:41 crc kubenswrapper[4791]: E1208 22:12:41.603018 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:12:41 crc kubenswrapper[4791]: I1208 22:12:41.944471 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fkf8w" event={"ID":"9fe07170-4c57-4348-a953-667497743fd6","Type":"ContainerStarted","Data":"5a8be4113ab1367463a6817878a375380d08ee9fd4c403119620f06b46c6a74c"} Dec 08 22:12:46 crc kubenswrapper[4791]: I1208 22:12:46.598910 4791 scope.go:117] "RemoveContainer" containerID="d71e943fae01350220f668be28850d267e8c50777be2af922215add5cdf7fee4" Dec 08 22:12:46 crc kubenswrapper[4791]: E1208 22:12:46.599833 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:12:46 crc kubenswrapper[4791]: I1208 22:12:46.998139 4791 generic.go:334] "Generic (PLEG): container finished" podID="9fe07170-4c57-4348-a953-667497743fd6" containerID="5a8be4113ab1367463a6817878a375380d08ee9fd4c403119620f06b46c6a74c" exitCode=0 Dec 08 22:12:46 crc kubenswrapper[4791]: I1208 22:12:46.998289 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fkf8w" 
event={"ID":"9fe07170-4c57-4348-a953-667497743fd6","Type":"ContainerDied","Data":"5a8be4113ab1367463a6817878a375380d08ee9fd4c403119620f06b46c6a74c"} Dec 08 22:12:48 crc kubenswrapper[4791]: I1208 22:12:48.013260 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fkf8w" event={"ID":"9fe07170-4c57-4348-a953-667497743fd6","Type":"ContainerStarted","Data":"3fa6e636cf0bdc314cae3631ef62104d4d5b120de8c9fa97fad93557357af9ee"} Dec 08 22:12:48 crc kubenswrapper[4791]: I1208 22:12:48.044231 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-fkf8w" podStartSLOduration=2.600941213 podStartE2EDuration="9.044206439s" podCreationTimestamp="2025-12-08 22:12:39 +0000 UTC" firstStartedPulling="2025-12-08 22:12:40.932682795 +0000 UTC m=+3237.631441140" lastFinishedPulling="2025-12-08 22:12:47.375948021 +0000 UTC m=+3244.074706366" observedRunningTime="2025-12-08 22:12:48.036196839 +0000 UTC m=+3244.734955194" watchObservedRunningTime="2025-12-08 22:12:48.044206439 +0000 UTC m=+3244.742964784" Dec 08 22:12:50 crc kubenswrapper[4791]: I1208 22:12:50.050557 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-fkf8w" Dec 08 22:12:50 crc kubenswrapper[4791]: I1208 22:12:50.051096 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-fkf8w" Dec 08 22:12:51 crc kubenswrapper[4791]: I1208 22:12:51.101967 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-fkf8w" podUID="9fe07170-4c57-4348-a953-667497743fd6" containerName="registry-server" probeResult="failure" output=< Dec 08 22:12:51 crc kubenswrapper[4791]: timeout: failed to connect service ":50051" within 1s Dec 08 22:12:51 crc kubenswrapper[4791]: > Dec 08 22:12:56 crc kubenswrapper[4791]: I1208 22:12:56.597989 4791 scope.go:117] "RemoveContainer" containerID="7b6c9b2a607849ad375959fc2219a81ee02917eed20de7fb431c4855f9233e1d" Dec 08 22:12:56 crc kubenswrapper[4791]: E1208 22:12:56.598747 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:12:57 crc kubenswrapper[4791]: I1208 22:12:57.598607 4791 scope.go:117] "RemoveContainer" containerID="d71e943fae01350220f668be28850d267e8c50777be2af922215add5cdf7fee4" Dec 08 22:12:57 crc kubenswrapper[4791]: E1208 22:12:57.598984 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:13:00 crc kubenswrapper[4791]: I1208 22:13:00.123746 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-fkf8w" Dec 08 22:13:00 crc kubenswrapper[4791]: I1208 22:13:00.192011 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openshift-marketplace/redhat-operators-fkf8w" Dec 08 22:13:00 crc kubenswrapper[4791]: I1208 22:13:00.367863 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fkf8w"] Dec 08 22:13:02 crc kubenswrapper[4791]: I1208 22:13:02.143566 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-fkf8w" podUID="9fe07170-4c57-4348-a953-667497743fd6" containerName="registry-server" containerID="cri-o://3fa6e636cf0bdc314cae3631ef62104d4d5b120de8c9fa97fad93557357af9ee" gracePeriod=2 Dec 08 22:13:02 crc kubenswrapper[4791]: I1208 22:13:02.672952 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fkf8w" Dec 08 22:13:02 crc kubenswrapper[4791]: I1208 22:13:02.796775 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wvvjc\" (UniqueName: \"kubernetes.io/projected/9fe07170-4c57-4348-a953-667497743fd6-kube-api-access-wvvjc\") pod \"9fe07170-4c57-4348-a953-667497743fd6\" (UID: \"9fe07170-4c57-4348-a953-667497743fd6\") " Dec 08 22:13:02 crc kubenswrapper[4791]: I1208 22:13:02.796955 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9fe07170-4c57-4348-a953-667497743fd6-catalog-content\") pod \"9fe07170-4c57-4348-a953-667497743fd6\" (UID: \"9fe07170-4c57-4348-a953-667497743fd6\") " Dec 08 22:13:02 crc kubenswrapper[4791]: I1208 22:13:02.797066 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9fe07170-4c57-4348-a953-667497743fd6-utilities\") pod \"9fe07170-4c57-4348-a953-667497743fd6\" (UID: \"9fe07170-4c57-4348-a953-667497743fd6\") " Dec 08 22:13:02 crc kubenswrapper[4791]: I1208 22:13:02.797930 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9fe07170-4c57-4348-a953-667497743fd6-utilities" (OuterVolumeSpecName: "utilities") pod "9fe07170-4c57-4348-a953-667497743fd6" (UID: "9fe07170-4c57-4348-a953-667497743fd6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:13:02 crc kubenswrapper[4791]: I1208 22:13:02.803255 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9fe07170-4c57-4348-a953-667497743fd6-kube-api-access-wvvjc" (OuterVolumeSpecName: "kube-api-access-wvvjc") pod "9fe07170-4c57-4348-a953-667497743fd6" (UID: "9fe07170-4c57-4348-a953-667497743fd6"). InnerVolumeSpecName "kube-api-access-wvvjc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 22:13:02 crc kubenswrapper[4791]: I1208 22:13:02.899354 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wvvjc\" (UniqueName: \"kubernetes.io/projected/9fe07170-4c57-4348-a953-667497743fd6-kube-api-access-wvvjc\") on node \"crc\" DevicePath \"\"" Dec 08 22:13:02 crc kubenswrapper[4791]: I1208 22:13:02.899389 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9fe07170-4c57-4348-a953-667497743fd6-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 22:13:02 crc kubenswrapper[4791]: I1208 22:13:02.920421 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9fe07170-4c57-4348-a953-667497743fd6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9fe07170-4c57-4348-a953-667497743fd6" (UID: "9fe07170-4c57-4348-a953-667497743fd6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:13:03 crc kubenswrapper[4791]: I1208 22:13:03.001168 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9fe07170-4c57-4348-a953-667497743fd6-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 22:13:03 crc kubenswrapper[4791]: I1208 22:13:03.176334 4791 generic.go:334] "Generic (PLEG): container finished" podID="9fe07170-4c57-4348-a953-667497743fd6" containerID="3fa6e636cf0bdc314cae3631ef62104d4d5b120de8c9fa97fad93557357af9ee" exitCode=0 Dec 08 22:13:03 crc kubenswrapper[4791]: I1208 22:13:03.176802 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fkf8w" Dec 08 22:13:03 crc kubenswrapper[4791]: I1208 22:13:03.176840 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fkf8w" event={"ID":"9fe07170-4c57-4348-a953-667497743fd6","Type":"ContainerDied","Data":"3fa6e636cf0bdc314cae3631ef62104d4d5b120de8c9fa97fad93557357af9ee"} Dec 08 22:13:03 crc kubenswrapper[4791]: I1208 22:13:03.177124 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fkf8w" event={"ID":"9fe07170-4c57-4348-a953-667497743fd6","Type":"ContainerDied","Data":"7167e5521ceba80ec4a2b3afa32cd625ac756be01d7fb789fe6a371637e6a24f"} Dec 08 22:13:03 crc kubenswrapper[4791]: I1208 22:13:03.177167 4791 scope.go:117] "RemoveContainer" containerID="3fa6e636cf0bdc314cae3631ef62104d4d5b120de8c9fa97fad93557357af9ee" Dec 08 22:13:03 crc kubenswrapper[4791]: I1208 22:13:03.222757 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fkf8w"] Dec 08 22:13:03 crc kubenswrapper[4791]: I1208 22:13:03.222765 4791 scope.go:117] "RemoveContainer" containerID="5a8be4113ab1367463a6817878a375380d08ee9fd4c403119620f06b46c6a74c" Dec 08 22:13:03 crc kubenswrapper[4791]: I1208 22:13:03.242695 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-fkf8w"] Dec 08 22:13:03 crc kubenswrapper[4791]: I1208 22:13:03.263932 4791 scope.go:117] "RemoveContainer" containerID="4acb7a9260809b147e38829d8bc7d4837cab6a58b5682b5eb6865f50639ff83d" Dec 08 22:13:03 crc kubenswrapper[4791]: I1208 22:13:03.322483 4791 scope.go:117] "RemoveContainer" containerID="3fa6e636cf0bdc314cae3631ef62104d4d5b120de8c9fa97fad93557357af9ee" Dec 08 22:13:03 crc kubenswrapper[4791]: E1208 22:13:03.323237 4791 log.go:32] "ContainerStatus 
from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3fa6e636cf0bdc314cae3631ef62104d4d5b120de8c9fa97fad93557357af9ee\": container with ID starting with 3fa6e636cf0bdc314cae3631ef62104d4d5b120de8c9fa97fad93557357af9ee not found: ID does not exist" containerID="3fa6e636cf0bdc314cae3631ef62104d4d5b120de8c9fa97fad93557357af9ee" Dec 08 22:13:03 crc kubenswrapper[4791]: I1208 22:13:03.323278 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3fa6e636cf0bdc314cae3631ef62104d4d5b120de8c9fa97fad93557357af9ee"} err="failed to get container status \"3fa6e636cf0bdc314cae3631ef62104d4d5b120de8c9fa97fad93557357af9ee\": rpc error: code = NotFound desc = could not find container \"3fa6e636cf0bdc314cae3631ef62104d4d5b120de8c9fa97fad93557357af9ee\": container with ID starting with 3fa6e636cf0bdc314cae3631ef62104d4d5b120de8c9fa97fad93557357af9ee not found: ID does not exist" Dec 08 22:13:03 crc kubenswrapper[4791]: I1208 22:13:03.323300 4791 scope.go:117] "RemoveContainer" containerID="5a8be4113ab1367463a6817878a375380d08ee9fd4c403119620f06b46c6a74c" Dec 08 22:13:03 crc kubenswrapper[4791]: E1208 22:13:03.324333 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a8be4113ab1367463a6817878a375380d08ee9fd4c403119620f06b46c6a74c\": container with ID starting with 5a8be4113ab1367463a6817878a375380d08ee9fd4c403119620f06b46c6a74c not found: ID does not exist" containerID="5a8be4113ab1367463a6817878a375380d08ee9fd4c403119620f06b46c6a74c" Dec 08 22:13:03 crc kubenswrapper[4791]: I1208 22:13:03.324361 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a8be4113ab1367463a6817878a375380d08ee9fd4c403119620f06b46c6a74c"} err="failed to get container status \"5a8be4113ab1367463a6817878a375380d08ee9fd4c403119620f06b46c6a74c\": rpc error: code = NotFound desc = could not find container \"5a8be4113ab1367463a6817878a375380d08ee9fd4c403119620f06b46c6a74c\": container with ID starting with 5a8be4113ab1367463a6817878a375380d08ee9fd4c403119620f06b46c6a74c not found: ID does not exist" Dec 08 22:13:03 crc kubenswrapper[4791]: I1208 22:13:03.324378 4791 scope.go:117] "RemoveContainer" containerID="4acb7a9260809b147e38829d8bc7d4837cab6a58b5682b5eb6865f50639ff83d" Dec 08 22:13:03 crc kubenswrapper[4791]: E1208 22:13:03.327201 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4acb7a9260809b147e38829d8bc7d4837cab6a58b5682b5eb6865f50639ff83d\": container with ID starting with 4acb7a9260809b147e38829d8bc7d4837cab6a58b5682b5eb6865f50639ff83d not found: ID does not exist" containerID="4acb7a9260809b147e38829d8bc7d4837cab6a58b5682b5eb6865f50639ff83d" Dec 08 22:13:03 crc kubenswrapper[4791]: I1208 22:13:03.327248 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4acb7a9260809b147e38829d8bc7d4837cab6a58b5682b5eb6865f50639ff83d"} err="failed to get container status \"4acb7a9260809b147e38829d8bc7d4837cab6a58b5682b5eb6865f50639ff83d\": rpc error: code = NotFound desc = could not find container \"4acb7a9260809b147e38829d8bc7d4837cab6a58b5682b5eb6865f50639ff83d\": container with ID starting with 4acb7a9260809b147e38829d8bc7d4837cab6a58b5682b5eb6865f50639ff83d not found: ID does not exist" Dec 08 22:13:03 crc kubenswrapper[4791]: I1208 22:13:03.618902 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="9fe07170-4c57-4348-a953-667497743fd6" path="/var/lib/kubelet/pods/9fe07170-4c57-4348-a953-667497743fd6/volumes" Dec 08 22:13:10 crc kubenswrapper[4791]: I1208 22:13:10.598375 4791 scope.go:117] "RemoveContainer" containerID="7b6c9b2a607849ad375959fc2219a81ee02917eed20de7fb431c4855f9233e1d" Dec 08 22:13:10 crc kubenswrapper[4791]: E1208 22:13:10.599078 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:13:11 crc kubenswrapper[4791]: I1208 22:13:11.598265 4791 scope.go:117] "RemoveContainer" containerID="d71e943fae01350220f668be28850d267e8c50777be2af922215add5cdf7fee4" Dec 08 22:13:11 crc kubenswrapper[4791]: E1208 22:13:11.598962 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:13:21 crc kubenswrapper[4791]: I1208 22:13:21.599130 4791 scope.go:117] "RemoveContainer" containerID="7b6c9b2a607849ad375959fc2219a81ee02917eed20de7fb431c4855f9233e1d" Dec 08 22:13:21 crc kubenswrapper[4791]: E1208 22:13:21.600153 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:13:25 crc kubenswrapper[4791]: I1208 22:13:25.598167 4791 scope.go:117] "RemoveContainer" containerID="d71e943fae01350220f668be28850d267e8c50777be2af922215add5cdf7fee4" Dec 08 22:13:25 crc kubenswrapper[4791]: E1208 22:13:25.599027 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:13:32 crc kubenswrapper[4791]: I1208 22:13:32.597782 4791 scope.go:117] "RemoveContainer" containerID="7b6c9b2a607849ad375959fc2219a81ee02917eed20de7fb431c4855f9233e1d" Dec 08 22:13:32 crc kubenswrapper[4791]: E1208 22:13:32.598507 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:13:40 crc kubenswrapper[4791]: I1208 22:13:40.598160 4791 
scope.go:117] "RemoveContainer" containerID="d71e943fae01350220f668be28850d267e8c50777be2af922215add5cdf7fee4" Dec 08 22:13:41 crc kubenswrapper[4791]: I1208 22:13:41.588293 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" event={"ID":"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d","Type":"ContainerStarted","Data":"4fbebc898b9a73bf136a4d97fb9284a72c43d8541bbb5a2aac1ddaa1c9cf153a"} Dec 08 22:13:43 crc kubenswrapper[4791]: I1208 22:13:43.612559 4791 scope.go:117] "RemoveContainer" containerID="7b6c9b2a607849ad375959fc2219a81ee02917eed20de7fb431c4855f9233e1d" Dec 08 22:13:43 crc kubenswrapper[4791]: E1208 22:13:43.613456 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:13:57 crc kubenswrapper[4791]: I1208 22:13:57.598973 4791 scope.go:117] "RemoveContainer" containerID="7b6c9b2a607849ad375959fc2219a81ee02917eed20de7fb431c4855f9233e1d" Dec 08 22:13:57 crc kubenswrapper[4791]: E1208 22:13:57.599743 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:14:12 crc kubenswrapper[4791]: I1208 22:14:12.598373 4791 scope.go:117] "RemoveContainer" containerID="7b6c9b2a607849ad375959fc2219a81ee02917eed20de7fb431c4855f9233e1d" Dec 08 22:14:12 crc kubenswrapper[4791]: E1208 22:14:12.599334 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:14:26 crc kubenswrapper[4791]: I1208 22:14:26.597940 4791 scope.go:117] "RemoveContainer" containerID="7b6c9b2a607849ad375959fc2219a81ee02917eed20de7fb431c4855f9233e1d" Dec 08 22:14:26 crc kubenswrapper[4791]: E1208 22:14:26.598728 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:14:38 crc kubenswrapper[4791]: I1208 22:14:38.598372 4791 scope.go:117] "RemoveContainer" containerID="7b6c9b2a607849ad375959fc2219a81ee02917eed20de7fb431c4855f9233e1d" Dec 08 22:14:38 crc kubenswrapper[4791]: E1208 22:14:38.599291 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager 
pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:14:51 crc kubenswrapper[4791]: I1208 22:14:51.598628 4791 scope.go:117] "RemoveContainer" containerID="7b6c9b2a607849ad375959fc2219a81ee02917eed20de7fb431c4855f9233e1d" Dec 08 22:14:51 crc kubenswrapper[4791]: E1208 22:14:51.599497 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:15:00 crc kubenswrapper[4791]: I1208 22:15:00.146530 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420535-8klb2"] Dec 08 22:15:00 crc kubenswrapper[4791]: E1208 22:15:00.147681 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fe07170-4c57-4348-a953-667497743fd6" containerName="extract-utilities" Dec 08 22:15:00 crc kubenswrapper[4791]: I1208 22:15:00.147725 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fe07170-4c57-4348-a953-667497743fd6" containerName="extract-utilities" Dec 08 22:15:00 crc kubenswrapper[4791]: E1208 22:15:00.147763 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fe07170-4c57-4348-a953-667497743fd6" containerName="extract-content" Dec 08 22:15:00 crc kubenswrapper[4791]: I1208 22:15:00.147773 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fe07170-4c57-4348-a953-667497743fd6" containerName="extract-content" Dec 08 22:15:00 crc kubenswrapper[4791]: E1208 22:15:00.147813 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fe07170-4c57-4348-a953-667497743fd6" containerName="registry-server" Dec 08 22:15:00 crc kubenswrapper[4791]: I1208 22:15:00.147822 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fe07170-4c57-4348-a953-667497743fd6" containerName="registry-server" Dec 08 22:15:00 crc kubenswrapper[4791]: I1208 22:15:00.148080 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="9fe07170-4c57-4348-a953-667497743fd6" containerName="registry-server" Dec 08 22:15:00 crc kubenswrapper[4791]: I1208 22:15:00.149170 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420535-8klb2" Dec 08 22:15:00 crc kubenswrapper[4791]: I1208 22:15:00.152014 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 08 22:15:00 crc kubenswrapper[4791]: I1208 22:15:00.158049 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 08 22:15:00 crc kubenswrapper[4791]: I1208 22:15:00.160741 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420535-8klb2"] Dec 08 22:15:00 crc kubenswrapper[4791]: I1208 22:15:00.207278 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fa587672-c53b-4591-b5bf-5a1b55f077b0-secret-volume\") pod \"collect-profiles-29420535-8klb2\" (UID: \"fa587672-c53b-4591-b5bf-5a1b55f077b0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420535-8klb2" Dec 08 22:15:00 crc kubenswrapper[4791]: I1208 22:15:00.207408 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thmvn\" (UniqueName: \"kubernetes.io/projected/fa587672-c53b-4591-b5bf-5a1b55f077b0-kube-api-access-thmvn\") pod \"collect-profiles-29420535-8klb2\" (UID: \"fa587672-c53b-4591-b5bf-5a1b55f077b0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420535-8klb2" Dec 08 22:15:00 crc kubenswrapper[4791]: I1208 22:15:00.207581 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fa587672-c53b-4591-b5bf-5a1b55f077b0-config-volume\") pod \"collect-profiles-29420535-8klb2\" (UID: \"fa587672-c53b-4591-b5bf-5a1b55f077b0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420535-8klb2" Dec 08 22:15:00 crc kubenswrapper[4791]: I1208 22:15:00.310361 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-thmvn\" (UniqueName: \"kubernetes.io/projected/fa587672-c53b-4591-b5bf-5a1b55f077b0-kube-api-access-thmvn\") pod \"collect-profiles-29420535-8klb2\" (UID: \"fa587672-c53b-4591-b5bf-5a1b55f077b0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420535-8klb2" Dec 08 22:15:00 crc kubenswrapper[4791]: I1208 22:15:00.310449 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fa587672-c53b-4591-b5bf-5a1b55f077b0-config-volume\") pod \"collect-profiles-29420535-8klb2\" (UID: \"fa587672-c53b-4591-b5bf-5a1b55f077b0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420535-8klb2" Dec 08 22:15:00 crc kubenswrapper[4791]: I1208 22:15:00.310786 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fa587672-c53b-4591-b5bf-5a1b55f077b0-secret-volume\") pod \"collect-profiles-29420535-8klb2\" (UID: \"fa587672-c53b-4591-b5bf-5a1b55f077b0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420535-8klb2" Dec 08 22:15:00 crc kubenswrapper[4791]: I1208 22:15:00.311484 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fa587672-c53b-4591-b5bf-5a1b55f077b0-config-volume\") pod 
\"collect-profiles-29420535-8klb2\" (UID: \"fa587672-c53b-4591-b5bf-5a1b55f077b0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420535-8klb2" Dec 08 22:15:00 crc kubenswrapper[4791]: I1208 22:15:00.316859 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fa587672-c53b-4591-b5bf-5a1b55f077b0-secret-volume\") pod \"collect-profiles-29420535-8klb2\" (UID: \"fa587672-c53b-4591-b5bf-5a1b55f077b0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420535-8klb2" Dec 08 22:15:00 crc kubenswrapper[4791]: I1208 22:15:00.326947 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-thmvn\" (UniqueName: \"kubernetes.io/projected/fa587672-c53b-4591-b5bf-5a1b55f077b0-kube-api-access-thmvn\") pod \"collect-profiles-29420535-8klb2\" (UID: \"fa587672-c53b-4591-b5bf-5a1b55f077b0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420535-8klb2" Dec 08 22:15:00 crc kubenswrapper[4791]: I1208 22:15:00.486403 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420535-8klb2" Dec 08 22:15:00 crc kubenswrapper[4791]: I1208 22:15:00.945832 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420535-8klb2"] Dec 08 22:15:01 crc kubenswrapper[4791]: I1208 22:15:01.385286 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29420535-8klb2" event={"ID":"fa587672-c53b-4591-b5bf-5a1b55f077b0","Type":"ContainerStarted","Data":"bd125074ee5fecf21ee6a1d0dc9df27238207c1d538913f5af5aa43d46d6a57e"} Dec 08 22:15:01 crc kubenswrapper[4791]: I1208 22:15:01.385523 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29420535-8klb2" event={"ID":"fa587672-c53b-4591-b5bf-5a1b55f077b0","Type":"ContainerStarted","Data":"5d646a5541ee1a73887285b1a7da263d5d1c40c11ce57c5dd6ba202ed7fab5db"} Dec 08 22:15:01 crc kubenswrapper[4791]: I1208 22:15:01.407310 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29420535-8klb2" podStartSLOduration=1.4072875360000001 podStartE2EDuration="1.407287536s" podCreationTimestamp="2025-12-08 22:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 22:15:01.400835585 +0000 UTC m=+3378.099593930" watchObservedRunningTime="2025-12-08 22:15:01.407287536 +0000 UTC m=+3378.106045871" Dec 08 22:15:02 crc kubenswrapper[4791]: I1208 22:15:02.415750 4791 generic.go:334] "Generic (PLEG): container finished" podID="fa587672-c53b-4591-b5bf-5a1b55f077b0" containerID="bd125074ee5fecf21ee6a1d0dc9df27238207c1d538913f5af5aa43d46d6a57e" exitCode=0 Dec 08 22:15:02 crc kubenswrapper[4791]: I1208 22:15:02.415863 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29420535-8klb2" event={"ID":"fa587672-c53b-4591-b5bf-5a1b55f077b0","Type":"ContainerDied","Data":"bd125074ee5fecf21ee6a1d0dc9df27238207c1d538913f5af5aa43d46d6a57e"} Dec 08 22:15:03 crc kubenswrapper[4791]: I1208 22:15:03.865790 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420535-8klb2" Dec 08 22:15:03 crc kubenswrapper[4791]: I1208 22:15:03.895590 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-thmvn\" (UniqueName: \"kubernetes.io/projected/fa587672-c53b-4591-b5bf-5a1b55f077b0-kube-api-access-thmvn\") pod \"fa587672-c53b-4591-b5bf-5a1b55f077b0\" (UID: \"fa587672-c53b-4591-b5bf-5a1b55f077b0\") " Dec 08 22:15:03 crc kubenswrapper[4791]: I1208 22:15:03.895680 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fa587672-c53b-4591-b5bf-5a1b55f077b0-config-volume\") pod \"fa587672-c53b-4591-b5bf-5a1b55f077b0\" (UID: \"fa587672-c53b-4591-b5bf-5a1b55f077b0\") " Dec 08 22:15:03 crc kubenswrapper[4791]: I1208 22:15:03.896336 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fa587672-c53b-4591-b5bf-5a1b55f077b0-secret-volume\") pod \"fa587672-c53b-4591-b5bf-5a1b55f077b0\" (UID: \"fa587672-c53b-4591-b5bf-5a1b55f077b0\") " Dec 08 22:15:03 crc kubenswrapper[4791]: I1208 22:15:03.900010 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fa587672-c53b-4591-b5bf-5a1b55f077b0-config-volume" (OuterVolumeSpecName: "config-volume") pod "fa587672-c53b-4591-b5bf-5a1b55f077b0" (UID: "fa587672-c53b-4591-b5bf-5a1b55f077b0"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 22:15:03 crc kubenswrapper[4791]: I1208 22:15:03.908085 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa587672-c53b-4591-b5bf-5a1b55f077b0-kube-api-access-thmvn" (OuterVolumeSpecName: "kube-api-access-thmvn") pod "fa587672-c53b-4591-b5bf-5a1b55f077b0" (UID: "fa587672-c53b-4591-b5bf-5a1b55f077b0"). InnerVolumeSpecName "kube-api-access-thmvn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 22:15:03 crc kubenswrapper[4791]: I1208 22:15:03.927868 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa587672-c53b-4591-b5bf-5a1b55f077b0-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "fa587672-c53b-4591-b5bf-5a1b55f077b0" (UID: "fa587672-c53b-4591-b5bf-5a1b55f077b0"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 22:15:04 crc kubenswrapper[4791]: I1208 22:15:04.000997 4791 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fa587672-c53b-4591-b5bf-5a1b55f077b0-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 08 22:15:04 crc kubenswrapper[4791]: I1208 22:15:04.001044 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-thmvn\" (UniqueName: \"kubernetes.io/projected/fa587672-c53b-4591-b5bf-5a1b55f077b0-kube-api-access-thmvn\") on node \"crc\" DevicePath \"\"" Dec 08 22:15:04 crc kubenswrapper[4791]: I1208 22:15:04.001054 4791 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fa587672-c53b-4591-b5bf-5a1b55f077b0-config-volume\") on node \"crc\" DevicePath \"\"" Dec 08 22:15:04 crc kubenswrapper[4791]: I1208 22:15:04.439631 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29420535-8klb2" event={"ID":"fa587672-c53b-4591-b5bf-5a1b55f077b0","Type":"ContainerDied","Data":"5d646a5541ee1a73887285b1a7da263d5d1c40c11ce57c5dd6ba202ed7fab5db"} Dec 08 22:15:04 crc kubenswrapper[4791]: I1208 22:15:04.439982 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5d646a5541ee1a73887285b1a7da263d5d1c40c11ce57c5dd6ba202ed7fab5db" Dec 08 22:15:04 crc kubenswrapper[4791]: I1208 22:15:04.440053 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420535-8klb2" Dec 08 22:15:04 crc kubenswrapper[4791]: I1208 22:15:04.483886 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420490-klwv4"] Dec 08 22:15:04 crc kubenswrapper[4791]: I1208 22:15:04.493473 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420490-klwv4"] Dec 08 22:15:04 crc kubenswrapper[4791]: I1208 22:15:04.598827 4791 scope.go:117] "RemoveContainer" containerID="7b6c9b2a607849ad375959fc2219a81ee02917eed20de7fb431c4855f9233e1d" Dec 08 22:15:04 crc kubenswrapper[4791]: E1208 22:15:04.599360 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:15:05 crc kubenswrapper[4791]: I1208 22:15:05.613639 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="69c09f4f-b646-4950-ace1-e4c4c8fb3c3e" path="/var/lib/kubelet/pods/69c09f4f-b646-4950-ace1-e4c4c8fb3c3e/volumes" Dec 08 22:15:18 crc kubenswrapper[4791]: I1208 22:15:18.804071 4791 scope.go:117] "RemoveContainer" containerID="ff380b221b95f44bf3f85d68556f419d611fe1f3e6b4e5e9b308af98c52de7fd" Dec 08 22:15:19 crc kubenswrapper[4791]: I1208 22:15:19.601355 4791 scope.go:117] "RemoveContainer" containerID="7b6c9b2a607849ad375959fc2219a81ee02917eed20de7fb431c4855f9233e1d" Dec 08 22:15:19 crc kubenswrapper[4791]: E1208 22:15:19.601898 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager 
pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:15:34 crc kubenswrapper[4791]: I1208 22:15:34.598269 4791 scope.go:117] "RemoveContainer" containerID="7b6c9b2a607849ad375959fc2219a81ee02917eed20de7fb431c4855f9233e1d" Dec 08 22:15:34 crc kubenswrapper[4791]: E1208 22:15:34.599078 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:15:49 crc kubenswrapper[4791]: I1208 22:15:49.598001 4791 scope.go:117] "RemoveContainer" containerID="7b6c9b2a607849ad375959fc2219a81ee02917eed20de7fb431c4855f9233e1d" Dec 08 22:15:49 crc kubenswrapper[4791]: E1208 22:15:49.598877 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:15:59 crc kubenswrapper[4791]: I1208 22:15:59.221494 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-l5w4g"] Dec 08 22:15:59 crc kubenswrapper[4791]: E1208 22:15:59.224301 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa587672-c53b-4591-b5bf-5a1b55f077b0" containerName="collect-profiles" Dec 08 22:15:59 crc kubenswrapper[4791]: I1208 22:15:59.224427 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa587672-c53b-4591-b5bf-5a1b55f077b0" containerName="collect-profiles" Dec 08 22:15:59 crc kubenswrapper[4791]: I1208 22:15:59.224791 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa587672-c53b-4591-b5bf-5a1b55f077b0" containerName="collect-profiles" Dec 08 22:15:59 crc kubenswrapper[4791]: I1208 22:15:59.226739 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-l5w4g" Dec 08 22:15:59 crc kubenswrapper[4791]: I1208 22:15:59.240538 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-l5w4g"] Dec 08 22:15:59 crc kubenswrapper[4791]: I1208 22:15:59.375385 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5c9jr\" (UniqueName: \"kubernetes.io/projected/eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6-kube-api-access-5c9jr\") pod \"community-operators-l5w4g\" (UID: \"eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6\") " pod="openshift-marketplace/community-operators-l5w4g" Dec 08 22:15:59 crc kubenswrapper[4791]: I1208 22:15:59.375793 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6-catalog-content\") pod \"community-operators-l5w4g\" (UID: \"eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6\") " pod="openshift-marketplace/community-operators-l5w4g" Dec 08 22:15:59 crc kubenswrapper[4791]: I1208 22:15:59.375952 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6-utilities\") pod \"community-operators-l5w4g\" (UID: \"eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6\") " pod="openshift-marketplace/community-operators-l5w4g" Dec 08 22:15:59 crc kubenswrapper[4791]: I1208 22:15:59.478728 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5c9jr\" (UniqueName: \"kubernetes.io/projected/eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6-kube-api-access-5c9jr\") pod \"community-operators-l5w4g\" (UID: \"eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6\") " pod="openshift-marketplace/community-operators-l5w4g" Dec 08 22:15:59 crc kubenswrapper[4791]: I1208 22:15:59.478832 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6-catalog-content\") pod \"community-operators-l5w4g\" (UID: \"eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6\") " pod="openshift-marketplace/community-operators-l5w4g" Dec 08 22:15:59 crc kubenswrapper[4791]: I1208 22:15:59.478886 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6-utilities\") pod \"community-operators-l5w4g\" (UID: \"eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6\") " pod="openshift-marketplace/community-operators-l5w4g" Dec 08 22:15:59 crc kubenswrapper[4791]: I1208 22:15:59.479423 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6-catalog-content\") pod \"community-operators-l5w4g\" (UID: \"eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6\") " pod="openshift-marketplace/community-operators-l5w4g" Dec 08 22:15:59 crc kubenswrapper[4791]: I1208 22:15:59.479518 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6-utilities\") pod \"community-operators-l5w4g\" (UID: \"eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6\") " pod="openshift-marketplace/community-operators-l5w4g" Dec 08 22:15:59 crc kubenswrapper[4791]: I1208 22:15:59.503863 4791 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-5c9jr\" (UniqueName: \"kubernetes.io/projected/eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6-kube-api-access-5c9jr\") pod \"community-operators-l5w4g\" (UID: \"eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6\") " pod="openshift-marketplace/community-operators-l5w4g" Dec 08 22:15:59 crc kubenswrapper[4791]: I1208 22:15:59.594654 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-l5w4g" Dec 08 22:16:00 crc kubenswrapper[4791]: I1208 22:16:00.232149 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-l5w4g"] Dec 08 22:16:00 crc kubenswrapper[4791]: W1208 22:16:00.232190 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeceaaa41_ffe1_4dc1_a9bd_5318e0dadae6.slice/crio-f11450e104fec781c6ba29cb9680f1525c4be7710d85a2a1ebf73a8833b22442 WatchSource:0}: Error finding container f11450e104fec781c6ba29cb9680f1525c4be7710d85a2a1ebf73a8833b22442: Status 404 returned error can't find the container with id f11450e104fec781c6ba29cb9680f1525c4be7710d85a2a1ebf73a8833b22442 Dec 08 22:16:00 crc kubenswrapper[4791]: I1208 22:16:00.598482 4791 scope.go:117] "RemoveContainer" containerID="7b6c9b2a607849ad375959fc2219a81ee02917eed20de7fb431c4855f9233e1d" Dec 08 22:16:01 crc kubenswrapper[4791]: I1208 22:16:01.068701 4791 generic.go:334] "Generic (PLEG): container finished" podID="eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6" containerID="8af6d4e240227c098d5ffca57c9049ac6c3e4830bdb198682a23ee8e0342a543" exitCode=0 Dec 08 22:16:01 crc kubenswrapper[4791]: I1208 22:16:01.068827 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l5w4g" event={"ID":"eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6","Type":"ContainerDied","Data":"8af6d4e240227c098d5ffca57c9049ac6c3e4830bdb198682a23ee8e0342a543"} Dec 08 22:16:01 crc kubenswrapper[4791]: I1208 22:16:01.069042 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l5w4g" event={"ID":"eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6","Type":"ContainerStarted","Data":"f11450e104fec781c6ba29cb9680f1525c4be7710d85a2a1ebf73a8833b22442"} Dec 08 22:16:01 crc kubenswrapper[4791]: I1208 22:16:01.071474 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" event={"ID":"bcd8d669-4a40-401d-af99-651b840fb48b","Type":"ContainerStarted","Data":"6d7e6e059fca5ac1250166fda020f57791fcbab8e8e2a0f76b741e5cf639a69d"} Dec 08 22:16:01 crc kubenswrapper[4791]: I1208 22:16:01.071728 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 22:16:03 crc kubenswrapper[4791]: I1208 22:16:03.112486 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l5w4g" event={"ID":"eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6","Type":"ContainerStarted","Data":"48b8114359fe1a474c82015a458465fc8820f28ed091f704365dba7657bca41e"} Dec 08 22:16:04 crc kubenswrapper[4791]: I1208 22:16:04.122647 4791 generic.go:334] "Generic (PLEG): container finished" podID="eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6" containerID="48b8114359fe1a474c82015a458465fc8820f28ed091f704365dba7657bca41e" exitCode=0 Dec 08 22:16:04 crc kubenswrapper[4791]: I1208 22:16:04.122698 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-l5w4g" event={"ID":"eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6","Type":"ContainerDied","Data":"48b8114359fe1a474c82015a458465fc8820f28ed091f704365dba7657bca41e"} Dec 08 22:16:05 crc kubenswrapper[4791]: I1208 22:16:05.133896 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l5w4g" event={"ID":"eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6","Type":"ContainerStarted","Data":"2858e27848d5806c19c5071da7799c16449b021a78bcae9f6a485156c5232127"} Dec 08 22:16:05 crc kubenswrapper[4791]: I1208 22:16:05.161312 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-l5w4g" podStartSLOduration=2.678535521 podStartE2EDuration="6.161292921s" podCreationTimestamp="2025-12-08 22:15:59 +0000 UTC" firstStartedPulling="2025-12-08 22:16:01.07124096 +0000 UTC m=+3437.769999305" lastFinishedPulling="2025-12-08 22:16:04.55399836 +0000 UTC m=+3441.252756705" observedRunningTime="2025-12-08 22:16:05.156359676 +0000 UTC m=+3441.855118031" watchObservedRunningTime="2025-12-08 22:16:05.161292921 +0000 UTC m=+3441.860051266" Dec 08 22:16:05 crc kubenswrapper[4791]: I1208 22:16:05.252069 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 22:16:05 crc kubenswrapper[4791]: I1208 22:16:05.252163 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 22:16:05 crc kubenswrapper[4791]: I1208 22:16:05.886855 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 22:16:09 crc kubenswrapper[4791]: I1208 22:16:09.595490 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-l5w4g" Dec 08 22:16:09 crc kubenswrapper[4791]: I1208 22:16:09.596055 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-l5w4g" Dec 08 22:16:09 crc kubenswrapper[4791]: I1208 22:16:09.665532 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-l5w4g" Dec 08 22:16:10 crc kubenswrapper[4791]: I1208 22:16:10.248403 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-l5w4g" Dec 08 22:16:10 crc kubenswrapper[4791]: I1208 22:16:10.316417 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-l5w4g"] Dec 08 22:16:12 crc kubenswrapper[4791]: I1208 22:16:12.207015 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-l5w4g" podUID="eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6" containerName="registry-server" containerID="cri-o://2858e27848d5806c19c5071da7799c16449b021a78bcae9f6a485156c5232127" gracePeriod=2 Dec 08 22:16:12 crc kubenswrapper[4791]: I1208 22:16:12.720485 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-l5w4g" Dec 08 22:16:12 crc kubenswrapper[4791]: I1208 22:16:12.832429 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6-catalog-content\") pod \"eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6\" (UID: \"eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6\") " Dec 08 22:16:12 crc kubenswrapper[4791]: I1208 22:16:12.832496 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6-utilities\") pod \"eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6\" (UID: \"eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6\") " Dec 08 22:16:12 crc kubenswrapper[4791]: I1208 22:16:12.832614 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5c9jr\" (UniqueName: \"kubernetes.io/projected/eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6-kube-api-access-5c9jr\") pod \"eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6\" (UID: \"eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6\") " Dec 08 22:16:12 crc kubenswrapper[4791]: I1208 22:16:12.833363 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6-utilities" (OuterVolumeSpecName: "utilities") pod "eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6" (UID: "eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:16:12 crc kubenswrapper[4791]: I1208 22:16:12.837952 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6-kube-api-access-5c9jr" (OuterVolumeSpecName: "kube-api-access-5c9jr") pod "eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6" (UID: "eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6"). InnerVolumeSpecName "kube-api-access-5c9jr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 22:16:12 crc kubenswrapper[4791]: I1208 22:16:12.890410 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6" (UID: "eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:16:12 crc kubenswrapper[4791]: I1208 22:16:12.935572 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5c9jr\" (UniqueName: \"kubernetes.io/projected/eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6-kube-api-access-5c9jr\") on node \"crc\" DevicePath \"\"" Dec 08 22:16:12 crc kubenswrapper[4791]: I1208 22:16:12.935637 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 22:16:12 crc kubenswrapper[4791]: I1208 22:16:12.935652 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 22:16:13 crc kubenswrapper[4791]: I1208 22:16:13.220724 4791 generic.go:334] "Generic (PLEG): container finished" podID="eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6" containerID="2858e27848d5806c19c5071da7799c16449b021a78bcae9f6a485156c5232127" exitCode=0 Dec 08 22:16:13 crc kubenswrapper[4791]: I1208 22:16:13.220779 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l5w4g" event={"ID":"eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6","Type":"ContainerDied","Data":"2858e27848d5806c19c5071da7799c16449b021a78bcae9f6a485156c5232127"} Dec 08 22:16:13 crc kubenswrapper[4791]: I1208 22:16:13.220815 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l5w4g" event={"ID":"eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6","Type":"ContainerDied","Data":"f11450e104fec781c6ba29cb9680f1525c4be7710d85a2a1ebf73a8833b22442"} Dec 08 22:16:13 crc kubenswrapper[4791]: I1208 22:16:13.220832 4791 scope.go:117] "RemoveContainer" containerID="2858e27848d5806c19c5071da7799c16449b021a78bcae9f6a485156c5232127" Dec 08 22:16:13 crc kubenswrapper[4791]: I1208 22:16:13.220889 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-l5w4g" Dec 08 22:16:13 crc kubenswrapper[4791]: I1208 22:16:13.242003 4791 scope.go:117] "RemoveContainer" containerID="48b8114359fe1a474c82015a458465fc8820f28ed091f704365dba7657bca41e" Dec 08 22:16:13 crc kubenswrapper[4791]: I1208 22:16:13.264242 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-l5w4g"] Dec 08 22:16:13 crc kubenswrapper[4791]: I1208 22:16:13.275019 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-l5w4g"] Dec 08 22:16:13 crc kubenswrapper[4791]: I1208 22:16:13.302377 4791 scope.go:117] "RemoveContainer" containerID="8af6d4e240227c098d5ffca57c9049ac6c3e4830bdb198682a23ee8e0342a543" Dec 08 22:16:13 crc kubenswrapper[4791]: I1208 22:16:13.323571 4791 scope.go:117] "RemoveContainer" containerID="2858e27848d5806c19c5071da7799c16449b021a78bcae9f6a485156c5232127" Dec 08 22:16:13 crc kubenswrapper[4791]: E1208 22:16:13.324268 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2858e27848d5806c19c5071da7799c16449b021a78bcae9f6a485156c5232127\": container with ID starting with 2858e27848d5806c19c5071da7799c16449b021a78bcae9f6a485156c5232127 not found: ID does not exist" containerID="2858e27848d5806c19c5071da7799c16449b021a78bcae9f6a485156c5232127" Dec 08 22:16:13 crc kubenswrapper[4791]: I1208 22:16:13.324315 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2858e27848d5806c19c5071da7799c16449b021a78bcae9f6a485156c5232127"} err="failed to get container status \"2858e27848d5806c19c5071da7799c16449b021a78bcae9f6a485156c5232127\": rpc error: code = NotFound desc = could not find container \"2858e27848d5806c19c5071da7799c16449b021a78bcae9f6a485156c5232127\": container with ID starting with 2858e27848d5806c19c5071da7799c16449b021a78bcae9f6a485156c5232127 not found: ID does not exist" Dec 08 22:16:13 crc kubenswrapper[4791]: I1208 22:16:13.324347 4791 scope.go:117] "RemoveContainer" containerID="48b8114359fe1a474c82015a458465fc8820f28ed091f704365dba7657bca41e" Dec 08 22:16:13 crc kubenswrapper[4791]: E1208 22:16:13.324912 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"48b8114359fe1a474c82015a458465fc8820f28ed091f704365dba7657bca41e\": container with ID starting with 48b8114359fe1a474c82015a458465fc8820f28ed091f704365dba7657bca41e not found: ID does not exist" containerID="48b8114359fe1a474c82015a458465fc8820f28ed091f704365dba7657bca41e" Dec 08 22:16:13 crc kubenswrapper[4791]: I1208 22:16:13.324975 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48b8114359fe1a474c82015a458465fc8820f28ed091f704365dba7657bca41e"} err="failed to get container status \"48b8114359fe1a474c82015a458465fc8820f28ed091f704365dba7657bca41e\": rpc error: code = NotFound desc = could not find container \"48b8114359fe1a474c82015a458465fc8820f28ed091f704365dba7657bca41e\": container with ID starting with 48b8114359fe1a474c82015a458465fc8820f28ed091f704365dba7657bca41e not found: ID does not exist" Dec 08 22:16:13 crc kubenswrapper[4791]: I1208 22:16:13.325011 4791 scope.go:117] "RemoveContainer" containerID="8af6d4e240227c098d5ffca57c9049ac6c3e4830bdb198682a23ee8e0342a543" Dec 08 22:16:13 crc kubenswrapper[4791]: E1208 22:16:13.325406 4791 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"8af6d4e240227c098d5ffca57c9049ac6c3e4830bdb198682a23ee8e0342a543\": container with ID starting with 8af6d4e240227c098d5ffca57c9049ac6c3e4830bdb198682a23ee8e0342a543 not found: ID does not exist" containerID="8af6d4e240227c098d5ffca57c9049ac6c3e4830bdb198682a23ee8e0342a543" Dec 08 22:16:13 crc kubenswrapper[4791]: I1208 22:16:13.325438 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8af6d4e240227c098d5ffca57c9049ac6c3e4830bdb198682a23ee8e0342a543"} err="failed to get container status \"8af6d4e240227c098d5ffca57c9049ac6c3e4830bdb198682a23ee8e0342a543\": rpc error: code = NotFound desc = could not find container \"8af6d4e240227c098d5ffca57c9049ac6c3e4830bdb198682a23ee8e0342a543\": container with ID starting with 8af6d4e240227c098d5ffca57c9049ac6c3e4830bdb198682a23ee8e0342a543 not found: ID does not exist" Dec 08 22:16:13 crc kubenswrapper[4791]: I1208 22:16:13.612797 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6" path="/var/lib/kubelet/pods/eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6/volumes" Dec 08 22:16:35 crc kubenswrapper[4791]: I1208 22:16:35.251913 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 22:16:35 crc kubenswrapper[4791]: I1208 22:16:35.252425 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 22:17:05 crc kubenswrapper[4791]: I1208 22:17:05.251848 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 22:17:05 crc kubenswrapper[4791]: I1208 22:17:05.252375 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 22:17:05 crc kubenswrapper[4791]: I1208 22:17:05.252439 4791 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" Dec 08 22:17:05 crc kubenswrapper[4791]: I1208 22:17:05.253385 4791 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4fbebc898b9a73bf136a4d97fb9284a72c43d8541bbb5a2aac1ddaa1c9cf153a"} pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 08 22:17:05 crc kubenswrapper[4791]: I1208 22:17:05.253432 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" 
podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" containerID="cri-o://4fbebc898b9a73bf136a4d97fb9284a72c43d8541bbb5a2aac1ddaa1c9cf153a" gracePeriod=600 Dec 08 22:17:05 crc kubenswrapper[4791]: I1208 22:17:05.757525 4791 generic.go:334] "Generic (PLEG): container finished" podID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerID="4fbebc898b9a73bf136a4d97fb9284a72c43d8541bbb5a2aac1ddaa1c9cf153a" exitCode=0 Dec 08 22:17:05 crc kubenswrapper[4791]: I1208 22:17:05.757977 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" event={"ID":"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d","Type":"ContainerDied","Data":"4fbebc898b9a73bf136a4d97fb9284a72c43d8541bbb5a2aac1ddaa1c9cf153a"} Dec 08 22:17:05 crc kubenswrapper[4791]: I1208 22:17:05.758006 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" event={"ID":"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d","Type":"ContainerStarted","Data":"2e2170d7813848aafc86390ba56331b69fab25fa48d999b5edf9d163454cd903"} Dec 08 22:17:05 crc kubenswrapper[4791]: I1208 22:17:05.758023 4791 scope.go:117] "RemoveContainer" containerID="d71e943fae01350220f668be28850d267e8c50777be2af922215add5cdf7fee4" Dec 08 22:18:33 crc kubenswrapper[4791]: I1208 22:18:33.654989 4791 generic.go:334] "Generic (PLEG): container finished" podID="bcd8d669-4a40-401d-af99-651b840fb48b" containerID="6d7e6e059fca5ac1250166fda020f57791fcbab8e8e2a0f76b741e5cf639a69d" exitCode=1 Dec 08 22:18:33 crc kubenswrapper[4791]: I1208 22:18:33.655044 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" event={"ID":"bcd8d669-4a40-401d-af99-651b840fb48b","Type":"ContainerDied","Data":"6d7e6e059fca5ac1250166fda020f57791fcbab8e8e2a0f76b741e5cf639a69d"} Dec 08 22:18:33 crc kubenswrapper[4791]: I1208 22:18:33.655532 4791 scope.go:117] "RemoveContainer" containerID="7b6c9b2a607849ad375959fc2219a81ee02917eed20de7fb431c4855f9233e1d" Dec 08 22:18:33 crc kubenswrapper[4791]: I1208 22:18:33.656816 4791 scope.go:117] "RemoveContainer" containerID="6d7e6e059fca5ac1250166fda020f57791fcbab8e8e2a0f76b741e5cf639a69d" Dec 08 22:18:33 crc kubenswrapper[4791]: E1208 22:18:33.657112 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:18:35 crc kubenswrapper[4791]: I1208 22:18:35.884800 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 22:18:35 crc kubenswrapper[4791]: I1208 22:18:35.885738 4791 scope.go:117] "RemoveContainer" containerID="6d7e6e059fca5ac1250166fda020f57791fcbab8e8e2a0f76b741e5cf639a69d" Dec 08 22:18:35 crc kubenswrapper[4791]: E1208 22:18:35.886074 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" 
pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:18:45 crc kubenswrapper[4791]: I1208 22:18:45.885227 4791 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 22:18:45 crc kubenswrapper[4791]: I1208 22:18:45.886554 4791 scope.go:117] "RemoveContainer" containerID="6d7e6e059fca5ac1250166fda020f57791fcbab8e8e2a0f76b741e5cf639a69d" Dec 08 22:18:45 crc kubenswrapper[4791]: E1208 22:18:45.886853 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:18:57 crc kubenswrapper[4791]: I1208 22:18:57.598474 4791 scope.go:117] "RemoveContainer" containerID="6d7e6e059fca5ac1250166fda020f57791fcbab8e8e2a0f76b741e5cf639a69d" Dec 08 22:18:57 crc kubenswrapper[4791]: E1208 22:18:57.599290 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:19:05 crc kubenswrapper[4791]: I1208 22:19:05.252057 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 22:19:05 crc kubenswrapper[4791]: I1208 22:19:05.252620 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 22:19:08 crc kubenswrapper[4791]: I1208 22:19:08.598613 4791 scope.go:117] "RemoveContainer" containerID="6d7e6e059fca5ac1250166fda020f57791fcbab8e8e2a0f76b741e5cf639a69d" Dec 08 22:19:08 crc kubenswrapper[4791]: E1208 22:19:08.599489 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:19:23 crc kubenswrapper[4791]: I1208 22:19:23.607212 4791 scope.go:117] "RemoveContainer" containerID="6d7e6e059fca5ac1250166fda020f57791fcbab8e8e2a0f76b741e5cf639a69d" Dec 08 22:19:23 crc kubenswrapper[4791]: E1208 22:19:23.608158 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager 
pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:19:34 crc kubenswrapper[4791]: I1208 22:19:34.597569 4791 scope.go:117] "RemoveContainer" containerID="6d7e6e059fca5ac1250166fda020f57791fcbab8e8e2a0f76b741e5cf639a69d" Dec 08 22:19:34 crc kubenswrapper[4791]: E1208 22:19:34.598266 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:19:35 crc kubenswrapper[4791]: I1208 22:19:35.251561 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 22:19:35 crc kubenswrapper[4791]: I1208 22:19:35.251619 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 22:19:47 crc kubenswrapper[4791]: I1208 22:19:47.599551 4791 scope.go:117] "RemoveContainer" containerID="6d7e6e059fca5ac1250166fda020f57791fcbab8e8e2a0f76b741e5cf639a69d" Dec 08 22:19:47 crc kubenswrapper[4791]: E1208 22:19:47.600296 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:20:00 crc kubenswrapper[4791]: I1208 22:20:00.598285 4791 scope.go:117] "RemoveContainer" containerID="6d7e6e059fca5ac1250166fda020f57791fcbab8e8e2a0f76b741e5cf639a69d" Dec 08 22:20:00 crc kubenswrapper[4791]: E1208 22:20:00.599165 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:20:05 crc kubenswrapper[4791]: I1208 22:20:05.252046 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 22:20:05 crc kubenswrapper[4791]: I1208 22:20:05.252594 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" 
podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 22:20:05 crc kubenswrapper[4791]: I1208 22:20:05.252655 4791 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" Dec 08 22:20:05 crc kubenswrapper[4791]: I1208 22:20:05.253434 4791 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2e2170d7813848aafc86390ba56331b69fab25fa48d999b5edf9d163454cd903"} pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 08 22:20:05 crc kubenswrapper[4791]: I1208 22:20:05.253494 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" containerID="cri-o://2e2170d7813848aafc86390ba56331b69fab25fa48d999b5edf9d163454cd903" gracePeriod=600 Dec 08 22:20:05 crc kubenswrapper[4791]: E1208 22:20:05.376134 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:20:05 crc kubenswrapper[4791]: I1208 22:20:05.542207 4791 generic.go:334] "Generic (PLEG): container finished" podID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerID="2e2170d7813848aafc86390ba56331b69fab25fa48d999b5edf9d163454cd903" exitCode=0 Dec 08 22:20:05 crc kubenswrapper[4791]: I1208 22:20:05.542268 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" event={"ID":"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d","Type":"ContainerDied","Data":"2e2170d7813848aafc86390ba56331b69fab25fa48d999b5edf9d163454cd903"} Dec 08 22:20:05 crc kubenswrapper[4791]: I1208 22:20:05.542527 4791 scope.go:117] "RemoveContainer" containerID="4fbebc898b9a73bf136a4d97fb9284a72c43d8541bbb5a2aac1ddaa1c9cf153a" Dec 08 22:20:05 crc kubenswrapper[4791]: I1208 22:20:05.543249 4791 scope.go:117] "RemoveContainer" containerID="2e2170d7813848aafc86390ba56331b69fab25fa48d999b5edf9d163454cd903" Dec 08 22:20:05 crc kubenswrapper[4791]: E1208 22:20:05.543542 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:20:12 crc kubenswrapper[4791]: I1208 22:20:12.598158 4791 scope.go:117] "RemoveContainer" containerID="6d7e6e059fca5ac1250166fda020f57791fcbab8e8e2a0f76b741e5cf639a69d" Dec 08 22:20:12 crc kubenswrapper[4791]: E1208 22:20:12.599338 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:20:17 crc kubenswrapper[4791]: I1208 22:20:17.597916 4791 scope.go:117] "RemoveContainer" containerID="2e2170d7813848aafc86390ba56331b69fab25fa48d999b5edf9d163454cd903" Dec 08 22:20:17 crc kubenswrapper[4791]: E1208 22:20:17.599682 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:20:26 crc kubenswrapper[4791]: I1208 22:20:26.597647 4791 scope.go:117] "RemoveContainer" containerID="6d7e6e059fca5ac1250166fda020f57791fcbab8e8e2a0f76b741e5cf639a69d" Dec 08 22:20:26 crc kubenswrapper[4791]: E1208 22:20:26.598420 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:20:32 crc kubenswrapper[4791]: I1208 22:20:32.598466 4791 scope.go:117] "RemoveContainer" containerID="2e2170d7813848aafc86390ba56331b69fab25fa48d999b5edf9d163454cd903" Dec 08 22:20:32 crc kubenswrapper[4791]: E1208 22:20:32.600367 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:20:40 crc kubenswrapper[4791]: I1208 22:20:40.598611 4791 scope.go:117] "RemoveContainer" containerID="6d7e6e059fca5ac1250166fda020f57791fcbab8e8e2a0f76b741e5cf639a69d" Dec 08 22:20:40 crc kubenswrapper[4791]: E1208 22:20:40.599286 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:20:47 crc kubenswrapper[4791]: I1208 22:20:47.598628 4791 scope.go:117] "RemoveContainer" containerID="2e2170d7813848aafc86390ba56331b69fab25fa48d999b5edf9d163454cd903" Dec 08 22:20:47 crc kubenswrapper[4791]: E1208 22:20:47.599556 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:20:51 crc kubenswrapper[4791]: I1208 22:20:51.598817 4791 scope.go:117] "RemoveContainer" containerID="6d7e6e059fca5ac1250166fda020f57791fcbab8e8e2a0f76b741e5cf639a69d" Dec 08 22:20:51 crc kubenswrapper[4791]: E1208 22:20:51.599818 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:21:01 crc kubenswrapper[4791]: I1208 22:21:01.598557 4791 scope.go:117] "RemoveContainer" containerID="2e2170d7813848aafc86390ba56331b69fab25fa48d999b5edf9d163454cd903" Dec 08 22:21:01 crc kubenswrapper[4791]: E1208 22:21:01.599539 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:21:04 crc kubenswrapper[4791]: I1208 22:21:04.598141 4791 scope.go:117] "RemoveContainer" containerID="6d7e6e059fca5ac1250166fda020f57791fcbab8e8e2a0f76b741e5cf639a69d" Dec 08 22:21:04 crc kubenswrapper[4791]: E1208 22:21:04.598451 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:21:14 crc kubenswrapper[4791]: I1208 22:21:14.599238 4791 scope.go:117] "RemoveContainer" containerID="2e2170d7813848aafc86390ba56331b69fab25fa48d999b5edf9d163454cd903" Dec 08 22:21:14 crc kubenswrapper[4791]: E1208 22:21:14.600281 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:21:16 crc kubenswrapper[4791]: I1208 22:21:16.598491 4791 scope.go:117] "RemoveContainer" containerID="6d7e6e059fca5ac1250166fda020f57791fcbab8e8e2a0f76b741e5cf639a69d" Dec 08 22:21:16 crc kubenswrapper[4791]: E1208 22:21:16.599145 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:21:25 crc kubenswrapper[4791]: I1208 22:21:25.597753 4791 scope.go:117] 
"RemoveContainer" containerID="2e2170d7813848aafc86390ba56331b69fab25fa48d999b5edf9d163454cd903" Dec 08 22:21:25 crc kubenswrapper[4791]: E1208 22:21:25.598986 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:21:31 crc kubenswrapper[4791]: I1208 22:21:31.598315 4791 scope.go:117] "RemoveContainer" containerID="6d7e6e059fca5ac1250166fda020f57791fcbab8e8e2a0f76b741e5cf639a69d" Dec 08 22:21:31 crc kubenswrapper[4791]: E1208 22:21:31.599131 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:21:36 crc kubenswrapper[4791]: I1208 22:21:36.598560 4791 scope.go:117] "RemoveContainer" containerID="2e2170d7813848aafc86390ba56331b69fab25fa48d999b5edf9d163454cd903" Dec 08 22:21:36 crc kubenswrapper[4791]: E1208 22:21:36.599311 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:21:45 crc kubenswrapper[4791]: I1208 22:21:45.600345 4791 scope.go:117] "RemoveContainer" containerID="6d7e6e059fca5ac1250166fda020f57791fcbab8e8e2a0f76b741e5cf639a69d" Dec 08 22:21:45 crc kubenswrapper[4791]: E1208 22:21:45.601389 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:21:47 crc kubenswrapper[4791]: I1208 22:21:47.599011 4791 scope.go:117] "RemoveContainer" containerID="2e2170d7813848aafc86390ba56331b69fab25fa48d999b5edf9d163454cd903" Dec 08 22:21:47 crc kubenswrapper[4791]: E1208 22:21:47.599859 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:21:58 crc kubenswrapper[4791]: I1208 22:21:58.597606 4791 scope.go:117] "RemoveContainer" containerID="2e2170d7813848aafc86390ba56331b69fab25fa48d999b5edf9d163454cd903" Dec 08 22:21:58 crc kubenswrapper[4791]: E1208 22:21:58.598332 4791 pod_workers.go:1301] "Error 
syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:22:00 crc kubenswrapper[4791]: I1208 22:22:00.598363 4791 scope.go:117] "RemoveContainer" containerID="6d7e6e059fca5ac1250166fda020f57791fcbab8e8e2a0f76b741e5cf639a69d" Dec 08 22:22:00 crc kubenswrapper[4791]: E1208 22:22:00.599097 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:22:09 crc kubenswrapper[4791]: I1208 22:22:09.599644 4791 scope.go:117] "RemoveContainer" containerID="2e2170d7813848aafc86390ba56331b69fab25fa48d999b5edf9d163454cd903" Dec 08 22:22:09 crc kubenswrapper[4791]: E1208 22:22:09.600694 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:22:15 crc kubenswrapper[4791]: I1208 22:22:15.598210 4791 scope.go:117] "RemoveContainer" containerID="6d7e6e059fca5ac1250166fda020f57791fcbab8e8e2a0f76b741e5cf639a69d" Dec 08 22:22:15 crc kubenswrapper[4791]: E1208 22:22:15.599193 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:22:20 crc kubenswrapper[4791]: I1208 22:22:20.598606 4791 scope.go:117] "RemoveContainer" containerID="2e2170d7813848aafc86390ba56331b69fab25fa48d999b5edf9d163454cd903" Dec 08 22:22:20 crc kubenswrapper[4791]: E1208 22:22:20.599442 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:22:29 crc kubenswrapper[4791]: I1208 22:22:29.598667 4791 scope.go:117] "RemoveContainer" containerID="6d7e6e059fca5ac1250166fda020f57791fcbab8e8e2a0f76b741e5cf639a69d" Dec 08 22:22:29 crc kubenswrapper[4791]: E1208 22:22:29.600621 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager 
pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:22:32 crc kubenswrapper[4791]: I1208 22:22:32.598206 4791 scope.go:117] "RemoveContainer" containerID="2e2170d7813848aafc86390ba56331b69fab25fa48d999b5edf9d163454cd903" Dec 08 22:22:32 crc kubenswrapper[4791]: E1208 22:22:32.599080 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:22:35 crc kubenswrapper[4791]: I1208 22:22:35.043544 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-kdlxz"] Dec 08 22:22:35 crc kubenswrapper[4791]: E1208 22:22:35.044382 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6" containerName="registry-server" Dec 08 22:22:35 crc kubenswrapper[4791]: I1208 22:22:35.044395 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6" containerName="registry-server" Dec 08 22:22:35 crc kubenswrapper[4791]: E1208 22:22:35.044439 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6" containerName="extract-utilities" Dec 08 22:22:35 crc kubenswrapper[4791]: I1208 22:22:35.044446 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6" containerName="extract-utilities" Dec 08 22:22:35 crc kubenswrapper[4791]: E1208 22:22:35.044463 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6" containerName="extract-content" Dec 08 22:22:35 crc kubenswrapper[4791]: I1208 22:22:35.044469 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6" containerName="extract-content" Dec 08 22:22:35 crc kubenswrapper[4791]: I1208 22:22:35.044749 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="eceaaa41-ffe1-4dc1-a9bd-5318e0dadae6" containerName="registry-server" Dec 08 22:22:35 crc kubenswrapper[4791]: I1208 22:22:35.046643 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kdlxz" Dec 08 22:22:35 crc kubenswrapper[4791]: I1208 22:22:35.072638 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kdlxz"] Dec 08 22:22:35 crc kubenswrapper[4791]: I1208 22:22:35.106567 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06fc921b-5c3d-4d45-ba21-543cc622e482-catalog-content\") pod \"redhat-marketplace-kdlxz\" (UID: \"06fc921b-5c3d-4d45-ba21-543cc622e482\") " pod="openshift-marketplace/redhat-marketplace-kdlxz" Dec 08 22:22:35 crc kubenswrapper[4791]: I1208 22:22:35.107077 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ztdrs\" (UniqueName: \"kubernetes.io/projected/06fc921b-5c3d-4d45-ba21-543cc622e482-kube-api-access-ztdrs\") pod \"redhat-marketplace-kdlxz\" (UID: \"06fc921b-5c3d-4d45-ba21-543cc622e482\") " pod="openshift-marketplace/redhat-marketplace-kdlxz" Dec 08 22:22:35 crc kubenswrapper[4791]: I1208 22:22:35.107322 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06fc921b-5c3d-4d45-ba21-543cc622e482-utilities\") pod \"redhat-marketplace-kdlxz\" (UID: \"06fc921b-5c3d-4d45-ba21-543cc622e482\") " pod="openshift-marketplace/redhat-marketplace-kdlxz" Dec 08 22:22:35 crc kubenswrapper[4791]: I1208 22:22:35.208368 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06fc921b-5c3d-4d45-ba21-543cc622e482-utilities\") pod \"redhat-marketplace-kdlxz\" (UID: \"06fc921b-5c3d-4d45-ba21-543cc622e482\") " pod="openshift-marketplace/redhat-marketplace-kdlxz" Dec 08 22:22:35 crc kubenswrapper[4791]: I1208 22:22:35.208482 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06fc921b-5c3d-4d45-ba21-543cc622e482-catalog-content\") pod \"redhat-marketplace-kdlxz\" (UID: \"06fc921b-5c3d-4d45-ba21-543cc622e482\") " pod="openshift-marketplace/redhat-marketplace-kdlxz" Dec 08 22:22:35 crc kubenswrapper[4791]: I1208 22:22:35.208588 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ztdrs\" (UniqueName: \"kubernetes.io/projected/06fc921b-5c3d-4d45-ba21-543cc622e482-kube-api-access-ztdrs\") pod \"redhat-marketplace-kdlxz\" (UID: \"06fc921b-5c3d-4d45-ba21-543cc622e482\") " pod="openshift-marketplace/redhat-marketplace-kdlxz" Dec 08 22:22:35 crc kubenswrapper[4791]: I1208 22:22:35.209047 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06fc921b-5c3d-4d45-ba21-543cc622e482-catalog-content\") pod \"redhat-marketplace-kdlxz\" (UID: \"06fc921b-5c3d-4d45-ba21-543cc622e482\") " pod="openshift-marketplace/redhat-marketplace-kdlxz" Dec 08 22:22:35 crc kubenswrapper[4791]: I1208 22:22:35.209047 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06fc921b-5c3d-4d45-ba21-543cc622e482-utilities\") pod \"redhat-marketplace-kdlxz\" (UID: \"06fc921b-5c3d-4d45-ba21-543cc622e482\") " pod="openshift-marketplace/redhat-marketplace-kdlxz" Dec 08 22:22:35 crc kubenswrapper[4791]: I1208 22:22:35.245296 4791 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-ztdrs\" (UniqueName: \"kubernetes.io/projected/06fc921b-5c3d-4d45-ba21-543cc622e482-kube-api-access-ztdrs\") pod \"redhat-marketplace-kdlxz\" (UID: \"06fc921b-5c3d-4d45-ba21-543cc622e482\") " pod="openshift-marketplace/redhat-marketplace-kdlxz" Dec 08 22:22:35 crc kubenswrapper[4791]: I1208 22:22:35.387600 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kdlxz" Dec 08 22:22:35 crc kubenswrapper[4791]: I1208 22:22:35.929620 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kdlxz"] Dec 08 22:22:36 crc kubenswrapper[4791]: I1208 22:22:36.355611 4791 generic.go:334] "Generic (PLEG): container finished" podID="06fc921b-5c3d-4d45-ba21-543cc622e482" containerID="1a0f7e3ced83e870fd9cf1a69703e6ba6b3a35815f2071e58a81cb0c50ea8861" exitCode=0 Dec 08 22:22:36 crc kubenswrapper[4791]: I1208 22:22:36.355749 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kdlxz" event={"ID":"06fc921b-5c3d-4d45-ba21-543cc622e482","Type":"ContainerDied","Data":"1a0f7e3ced83e870fd9cf1a69703e6ba6b3a35815f2071e58a81cb0c50ea8861"} Dec 08 22:22:36 crc kubenswrapper[4791]: I1208 22:22:36.355923 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kdlxz" event={"ID":"06fc921b-5c3d-4d45-ba21-543cc622e482","Type":"ContainerStarted","Data":"6111a42934c764af0492a53f6abbbaa2acac1ba495c17a76891b01db08397300"} Dec 08 22:22:36 crc kubenswrapper[4791]: I1208 22:22:36.357950 4791 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 08 22:22:37 crc kubenswrapper[4791]: I1208 22:22:37.369887 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kdlxz" event={"ID":"06fc921b-5c3d-4d45-ba21-543cc622e482","Type":"ContainerStarted","Data":"d4ebc52a319aad64c37e3f60ac8ee850c72681a5a10b77b27c60e4b11abda3f4"} Dec 08 22:22:38 crc kubenswrapper[4791]: I1208 22:22:38.381781 4791 generic.go:334] "Generic (PLEG): container finished" podID="06fc921b-5c3d-4d45-ba21-543cc622e482" containerID="d4ebc52a319aad64c37e3f60ac8ee850c72681a5a10b77b27c60e4b11abda3f4" exitCode=0 Dec 08 22:22:38 crc kubenswrapper[4791]: I1208 22:22:38.381847 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kdlxz" event={"ID":"06fc921b-5c3d-4d45-ba21-543cc622e482","Type":"ContainerDied","Data":"d4ebc52a319aad64c37e3f60ac8ee850c72681a5a10b77b27c60e4b11abda3f4"} Dec 08 22:22:39 crc kubenswrapper[4791]: I1208 22:22:39.395505 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kdlxz" event={"ID":"06fc921b-5c3d-4d45-ba21-543cc622e482","Type":"ContainerStarted","Data":"b408a22c5e0b8c9b40b070c3998b131545be6af2c687166bda440d8cf0c16d07"} Dec 08 22:22:39 crc kubenswrapper[4791]: I1208 22:22:39.425294 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-kdlxz" podStartSLOduration=1.791881992 podStartE2EDuration="4.42527402s" podCreationTimestamp="2025-12-08 22:22:35 +0000 UTC" firstStartedPulling="2025-12-08 22:22:36.357655994 +0000 UTC m=+3833.056414339" lastFinishedPulling="2025-12-08 22:22:38.991048022 +0000 UTC m=+3835.689806367" observedRunningTime="2025-12-08 22:22:39.414109587 +0000 UTC m=+3836.112867972" watchObservedRunningTime="2025-12-08 22:22:39.42527402 +0000 UTC 
m=+3836.124032365" Dec 08 22:22:39 crc kubenswrapper[4791]: I1208 22:22:39.631683 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-hccgz"] Dec 08 22:22:39 crc kubenswrapper[4791]: I1208 22:22:39.636752 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hccgz" Dec 08 22:22:39 crc kubenswrapper[4791]: I1208 22:22:39.639014 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hccgz"] Dec 08 22:22:39 crc kubenswrapper[4791]: I1208 22:22:39.719493 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52797cda-77dc-4741-82f8-6fcdcb338995-utilities\") pod \"certified-operators-hccgz\" (UID: \"52797cda-77dc-4741-82f8-6fcdcb338995\") " pod="openshift-marketplace/certified-operators-hccgz" Dec 08 22:22:39 crc kubenswrapper[4791]: I1208 22:22:39.719631 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gk7gp\" (UniqueName: \"kubernetes.io/projected/52797cda-77dc-4741-82f8-6fcdcb338995-kube-api-access-gk7gp\") pod \"certified-operators-hccgz\" (UID: \"52797cda-77dc-4741-82f8-6fcdcb338995\") " pod="openshift-marketplace/certified-operators-hccgz" Dec 08 22:22:39 crc kubenswrapper[4791]: I1208 22:22:39.719671 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52797cda-77dc-4741-82f8-6fcdcb338995-catalog-content\") pod \"certified-operators-hccgz\" (UID: \"52797cda-77dc-4741-82f8-6fcdcb338995\") " pod="openshift-marketplace/certified-operators-hccgz" Dec 08 22:22:39 crc kubenswrapper[4791]: I1208 22:22:39.821565 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52797cda-77dc-4741-82f8-6fcdcb338995-utilities\") pod \"certified-operators-hccgz\" (UID: \"52797cda-77dc-4741-82f8-6fcdcb338995\") " pod="openshift-marketplace/certified-operators-hccgz" Dec 08 22:22:39 crc kubenswrapper[4791]: I1208 22:22:39.821681 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gk7gp\" (UniqueName: \"kubernetes.io/projected/52797cda-77dc-4741-82f8-6fcdcb338995-kube-api-access-gk7gp\") pod \"certified-operators-hccgz\" (UID: \"52797cda-77dc-4741-82f8-6fcdcb338995\") " pod="openshift-marketplace/certified-operators-hccgz" Dec 08 22:22:39 crc kubenswrapper[4791]: I1208 22:22:39.821730 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52797cda-77dc-4741-82f8-6fcdcb338995-catalog-content\") pod \"certified-operators-hccgz\" (UID: \"52797cda-77dc-4741-82f8-6fcdcb338995\") " pod="openshift-marketplace/certified-operators-hccgz" Dec 08 22:22:39 crc kubenswrapper[4791]: I1208 22:22:39.822173 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52797cda-77dc-4741-82f8-6fcdcb338995-catalog-content\") pod \"certified-operators-hccgz\" (UID: \"52797cda-77dc-4741-82f8-6fcdcb338995\") " pod="openshift-marketplace/certified-operators-hccgz" Dec 08 22:22:39 crc kubenswrapper[4791]: I1208 22:22:39.822559 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/52797cda-77dc-4741-82f8-6fcdcb338995-utilities\") pod \"certified-operators-hccgz\" (UID: \"52797cda-77dc-4741-82f8-6fcdcb338995\") " pod="openshift-marketplace/certified-operators-hccgz" Dec 08 22:22:39 crc kubenswrapper[4791]: I1208 22:22:39.843672 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gk7gp\" (UniqueName: \"kubernetes.io/projected/52797cda-77dc-4741-82f8-6fcdcb338995-kube-api-access-gk7gp\") pod \"certified-operators-hccgz\" (UID: \"52797cda-77dc-4741-82f8-6fcdcb338995\") " pod="openshift-marketplace/certified-operators-hccgz" Dec 08 22:22:39 crc kubenswrapper[4791]: I1208 22:22:39.976217 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hccgz" Dec 08 22:22:40 crc kubenswrapper[4791]: I1208 22:22:40.581207 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hccgz"] Dec 08 22:22:41 crc kubenswrapper[4791]: I1208 22:22:41.432093 4791 generic.go:334] "Generic (PLEG): container finished" podID="52797cda-77dc-4741-82f8-6fcdcb338995" containerID="d7e770cc157921a96deb96a10bdd848b576b8c7a3e87bb40f09e64da3923db4b" exitCode=0 Dec 08 22:22:41 crc kubenswrapper[4791]: I1208 22:22:41.432194 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hccgz" event={"ID":"52797cda-77dc-4741-82f8-6fcdcb338995","Type":"ContainerDied","Data":"d7e770cc157921a96deb96a10bdd848b576b8c7a3e87bb40f09e64da3923db4b"} Dec 08 22:22:41 crc kubenswrapper[4791]: I1208 22:22:41.432574 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hccgz" event={"ID":"52797cda-77dc-4741-82f8-6fcdcb338995","Type":"ContainerStarted","Data":"0fa0d6216cbcd5b3b379eae6a2b918b1b985771f5bbd920850062eb96e0878a6"} Dec 08 22:22:42 crc kubenswrapper[4791]: I1208 22:22:42.013008 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-xkbwz"] Dec 08 22:22:42 crc kubenswrapper[4791]: I1208 22:22:42.016088 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-xkbwz" Dec 08 22:22:42 crc kubenswrapper[4791]: I1208 22:22:42.025849 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xkbwz"] Dec 08 22:22:42 crc kubenswrapper[4791]: I1208 22:22:42.181236 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8234b10a-6f7f-4651-a230-b427dc587412-utilities\") pod \"redhat-operators-xkbwz\" (UID: \"8234b10a-6f7f-4651-a230-b427dc587412\") " pod="openshift-marketplace/redhat-operators-xkbwz" Dec 08 22:22:42 crc kubenswrapper[4791]: I1208 22:22:42.181431 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xsh4c\" (UniqueName: \"kubernetes.io/projected/8234b10a-6f7f-4651-a230-b427dc587412-kube-api-access-xsh4c\") pod \"redhat-operators-xkbwz\" (UID: \"8234b10a-6f7f-4651-a230-b427dc587412\") " pod="openshift-marketplace/redhat-operators-xkbwz" Dec 08 22:22:42 crc kubenswrapper[4791]: I1208 22:22:42.181811 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8234b10a-6f7f-4651-a230-b427dc587412-catalog-content\") pod \"redhat-operators-xkbwz\" (UID: \"8234b10a-6f7f-4651-a230-b427dc587412\") " pod="openshift-marketplace/redhat-operators-xkbwz" Dec 08 22:22:42 crc kubenswrapper[4791]: I1208 22:22:42.284297 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8234b10a-6f7f-4651-a230-b427dc587412-utilities\") pod \"redhat-operators-xkbwz\" (UID: \"8234b10a-6f7f-4651-a230-b427dc587412\") " pod="openshift-marketplace/redhat-operators-xkbwz" Dec 08 22:22:42 crc kubenswrapper[4791]: I1208 22:22:42.284731 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xsh4c\" (UniqueName: \"kubernetes.io/projected/8234b10a-6f7f-4651-a230-b427dc587412-kube-api-access-xsh4c\") pod \"redhat-operators-xkbwz\" (UID: \"8234b10a-6f7f-4651-a230-b427dc587412\") " pod="openshift-marketplace/redhat-operators-xkbwz" Dec 08 22:22:42 crc kubenswrapper[4791]: I1208 22:22:42.284856 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8234b10a-6f7f-4651-a230-b427dc587412-catalog-content\") pod \"redhat-operators-xkbwz\" (UID: \"8234b10a-6f7f-4651-a230-b427dc587412\") " pod="openshift-marketplace/redhat-operators-xkbwz" Dec 08 22:22:42 crc kubenswrapper[4791]: I1208 22:22:42.284996 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8234b10a-6f7f-4651-a230-b427dc587412-utilities\") pod \"redhat-operators-xkbwz\" (UID: \"8234b10a-6f7f-4651-a230-b427dc587412\") " pod="openshift-marketplace/redhat-operators-xkbwz" Dec 08 22:22:42 crc kubenswrapper[4791]: I1208 22:22:42.285373 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8234b10a-6f7f-4651-a230-b427dc587412-catalog-content\") pod \"redhat-operators-xkbwz\" (UID: \"8234b10a-6f7f-4651-a230-b427dc587412\") " pod="openshift-marketplace/redhat-operators-xkbwz" Dec 08 22:22:42 crc kubenswrapper[4791]: I1208 22:22:42.307489 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-xsh4c\" (UniqueName: \"kubernetes.io/projected/8234b10a-6f7f-4651-a230-b427dc587412-kube-api-access-xsh4c\") pod \"redhat-operators-xkbwz\" (UID: \"8234b10a-6f7f-4651-a230-b427dc587412\") " pod="openshift-marketplace/redhat-operators-xkbwz" Dec 08 22:22:42 crc kubenswrapper[4791]: I1208 22:22:42.342226 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xkbwz" Dec 08 22:22:42 crc kubenswrapper[4791]: I1208 22:22:42.489498 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hccgz" event={"ID":"52797cda-77dc-4741-82f8-6fcdcb338995","Type":"ContainerStarted","Data":"f10eff755b4cda9e3e9efdcc4b5a8d79af3a4f131770b31206b6996cf9cfc26f"} Dec 08 22:22:42 crc kubenswrapper[4791]: I1208 22:22:42.876219 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xkbwz"] Dec 08 22:22:43 crc kubenswrapper[4791]: I1208 22:22:43.500566 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xkbwz" event={"ID":"8234b10a-6f7f-4651-a230-b427dc587412","Type":"ContainerStarted","Data":"b18df498b307a63cdc01ca98266ee4ddb8cba0a82cbee2cb5e22a1a352d6a913"} Dec 08 22:22:43 crc kubenswrapper[4791]: I1208 22:22:43.500630 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xkbwz" event={"ID":"8234b10a-6f7f-4651-a230-b427dc587412","Type":"ContainerStarted","Data":"ad1884d0cbefacb01c59f9c34483ca1b02ca6b5337290014c124a4d0a2cfb168"} Dec 08 22:22:43 crc kubenswrapper[4791]: I1208 22:22:43.600701 4791 scope.go:117] "RemoveContainer" containerID="6d7e6e059fca5ac1250166fda020f57791fcbab8e8e2a0f76b741e5cf639a69d" Dec 08 22:22:43 crc kubenswrapper[4791]: E1208 22:22:43.601082 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:22:44 crc kubenswrapper[4791]: I1208 22:22:44.512175 4791 generic.go:334] "Generic (PLEG): container finished" podID="52797cda-77dc-4741-82f8-6fcdcb338995" containerID="f10eff755b4cda9e3e9efdcc4b5a8d79af3a4f131770b31206b6996cf9cfc26f" exitCode=0 Dec 08 22:22:44 crc kubenswrapper[4791]: I1208 22:22:44.512243 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hccgz" event={"ID":"52797cda-77dc-4741-82f8-6fcdcb338995","Type":"ContainerDied","Data":"f10eff755b4cda9e3e9efdcc4b5a8d79af3a4f131770b31206b6996cf9cfc26f"} Dec 08 22:22:44 crc kubenswrapper[4791]: I1208 22:22:44.514482 4791 generic.go:334] "Generic (PLEG): container finished" podID="8234b10a-6f7f-4651-a230-b427dc587412" containerID="b18df498b307a63cdc01ca98266ee4ddb8cba0a82cbee2cb5e22a1a352d6a913" exitCode=0 Dec 08 22:22:44 crc kubenswrapper[4791]: I1208 22:22:44.514522 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xkbwz" event={"ID":"8234b10a-6f7f-4651-a230-b427dc587412","Type":"ContainerDied","Data":"b18df498b307a63cdc01ca98266ee4ddb8cba0a82cbee2cb5e22a1a352d6a913"} Dec 08 22:22:45 crc kubenswrapper[4791]: I1208 22:22:45.389102 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/redhat-marketplace-kdlxz" Dec 08 22:22:45 crc kubenswrapper[4791]: I1208 22:22:45.389400 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-kdlxz" Dec 08 22:22:45 crc kubenswrapper[4791]: I1208 22:22:45.443604 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-kdlxz" Dec 08 22:22:45 crc kubenswrapper[4791]: I1208 22:22:45.525578 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hccgz" event={"ID":"52797cda-77dc-4741-82f8-6fcdcb338995","Type":"ContainerStarted","Data":"ccea17617fb4f873322d6a3ab951d605ccf374582ba5caf143a1ca6c3bc69811"} Dec 08 22:22:45 crc kubenswrapper[4791]: I1208 22:22:45.527490 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xkbwz" event={"ID":"8234b10a-6f7f-4651-a230-b427dc587412","Type":"ContainerStarted","Data":"adcab5380698f4caf9b4f92d11e742d6c0d7f2bc8f1145498706826d7eef5e1b"} Dec 08 22:22:45 crc kubenswrapper[4791]: I1208 22:22:45.553695 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hccgz" podStartSLOduration=3.066956326 podStartE2EDuration="6.553673807s" podCreationTimestamp="2025-12-08 22:22:39 +0000 UTC" firstStartedPulling="2025-12-08 22:22:41.435481093 +0000 UTC m=+3838.134239438" lastFinishedPulling="2025-12-08 22:22:44.922198574 +0000 UTC m=+3841.620956919" observedRunningTime="2025-12-08 22:22:45.545358017 +0000 UTC m=+3842.244116362" watchObservedRunningTime="2025-12-08 22:22:45.553673807 +0000 UTC m=+3842.252432152" Dec 08 22:22:45 crc kubenswrapper[4791]: I1208 22:22:45.586207 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-kdlxz" Dec 08 22:22:46 crc kubenswrapper[4791]: I1208 22:22:46.597638 4791 scope.go:117] "RemoveContainer" containerID="2e2170d7813848aafc86390ba56331b69fab25fa48d999b5edf9d163454cd903" Dec 08 22:22:46 crc kubenswrapper[4791]: E1208 22:22:46.598451 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:22:48 crc kubenswrapper[4791]: I1208 22:22:48.212276 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kdlxz"] Dec 08 22:22:48 crc kubenswrapper[4791]: I1208 22:22:48.212865 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-kdlxz" podUID="06fc921b-5c3d-4d45-ba21-543cc622e482" containerName="registry-server" containerID="cri-o://b408a22c5e0b8c9b40b070c3998b131545be6af2c687166bda440d8cf0c16d07" gracePeriod=2 Dec 08 22:22:49 crc kubenswrapper[4791]: I1208 22:22:49.242816 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kdlxz" Dec 08 22:22:49 crc kubenswrapper[4791]: I1208 22:22:49.361858 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06fc921b-5c3d-4d45-ba21-543cc622e482-utilities\") pod \"06fc921b-5c3d-4d45-ba21-543cc622e482\" (UID: \"06fc921b-5c3d-4d45-ba21-543cc622e482\") " Dec 08 22:22:49 crc kubenswrapper[4791]: I1208 22:22:49.362292 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ztdrs\" (UniqueName: \"kubernetes.io/projected/06fc921b-5c3d-4d45-ba21-543cc622e482-kube-api-access-ztdrs\") pod \"06fc921b-5c3d-4d45-ba21-543cc622e482\" (UID: \"06fc921b-5c3d-4d45-ba21-543cc622e482\") " Dec 08 22:22:49 crc kubenswrapper[4791]: I1208 22:22:49.362461 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06fc921b-5c3d-4d45-ba21-543cc622e482-catalog-content\") pod \"06fc921b-5c3d-4d45-ba21-543cc622e482\" (UID: \"06fc921b-5c3d-4d45-ba21-543cc622e482\") " Dec 08 22:22:49 crc kubenswrapper[4791]: I1208 22:22:49.371868 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/06fc921b-5c3d-4d45-ba21-543cc622e482-utilities" (OuterVolumeSpecName: "utilities") pod "06fc921b-5c3d-4d45-ba21-543cc622e482" (UID: "06fc921b-5c3d-4d45-ba21-543cc622e482"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:22:49 crc kubenswrapper[4791]: I1208 22:22:49.374817 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06fc921b-5c3d-4d45-ba21-543cc622e482-kube-api-access-ztdrs" (OuterVolumeSpecName: "kube-api-access-ztdrs") pod "06fc921b-5c3d-4d45-ba21-543cc622e482" (UID: "06fc921b-5c3d-4d45-ba21-543cc622e482"). InnerVolumeSpecName "kube-api-access-ztdrs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 22:22:49 crc kubenswrapper[4791]: I1208 22:22:49.387105 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/06fc921b-5c3d-4d45-ba21-543cc622e482-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "06fc921b-5c3d-4d45-ba21-543cc622e482" (UID: "06fc921b-5c3d-4d45-ba21-543cc622e482"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:22:49 crc kubenswrapper[4791]: I1208 22:22:49.465669 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06fc921b-5c3d-4d45-ba21-543cc622e482-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 22:22:49 crc kubenswrapper[4791]: I1208 22:22:49.465725 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ztdrs\" (UniqueName: \"kubernetes.io/projected/06fc921b-5c3d-4d45-ba21-543cc622e482-kube-api-access-ztdrs\") on node \"crc\" DevicePath \"\"" Dec 08 22:22:49 crc kubenswrapper[4791]: I1208 22:22:49.465745 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06fc921b-5c3d-4d45-ba21-543cc622e482-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 22:22:49 crc kubenswrapper[4791]: I1208 22:22:49.567313 4791 generic.go:334] "Generic (PLEG): container finished" podID="06fc921b-5c3d-4d45-ba21-543cc622e482" containerID="b408a22c5e0b8c9b40b070c3998b131545be6af2c687166bda440d8cf0c16d07" exitCode=0 Dec 08 22:22:49 crc kubenswrapper[4791]: I1208 22:22:49.567399 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kdlxz" event={"ID":"06fc921b-5c3d-4d45-ba21-543cc622e482","Type":"ContainerDied","Data":"b408a22c5e0b8c9b40b070c3998b131545be6af2c687166bda440d8cf0c16d07"} Dec 08 22:22:49 crc kubenswrapper[4791]: I1208 22:22:49.567431 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kdlxz" event={"ID":"06fc921b-5c3d-4d45-ba21-543cc622e482","Type":"ContainerDied","Data":"6111a42934c764af0492a53f6abbbaa2acac1ba495c17a76891b01db08397300"} Dec 08 22:22:49 crc kubenswrapper[4791]: I1208 22:22:49.567414 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kdlxz" Dec 08 22:22:49 crc kubenswrapper[4791]: I1208 22:22:49.567452 4791 scope.go:117] "RemoveContainer" containerID="b408a22c5e0b8c9b40b070c3998b131545be6af2c687166bda440d8cf0c16d07" Dec 08 22:22:49 crc kubenswrapper[4791]: I1208 22:22:49.573274 4791 generic.go:334] "Generic (PLEG): container finished" podID="8234b10a-6f7f-4651-a230-b427dc587412" containerID="adcab5380698f4caf9b4f92d11e742d6c0d7f2bc8f1145498706826d7eef5e1b" exitCode=0 Dec 08 22:22:49 crc kubenswrapper[4791]: I1208 22:22:49.573335 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xkbwz" event={"ID":"8234b10a-6f7f-4651-a230-b427dc587412","Type":"ContainerDied","Data":"adcab5380698f4caf9b4f92d11e742d6c0d7f2bc8f1145498706826d7eef5e1b"} Dec 08 22:22:49 crc kubenswrapper[4791]: I1208 22:22:49.590927 4791 scope.go:117] "RemoveContainer" containerID="d4ebc52a319aad64c37e3f60ac8ee850c72681a5a10b77b27c60e4b11abda3f4" Dec 08 22:22:49 crc kubenswrapper[4791]: I1208 22:22:49.635340 4791 scope.go:117] "RemoveContainer" containerID="1a0f7e3ced83e870fd9cf1a69703e6ba6b3a35815f2071e58a81cb0c50ea8861" Dec 08 22:22:49 crc kubenswrapper[4791]: I1208 22:22:49.644052 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kdlxz"] Dec 08 22:22:49 crc kubenswrapper[4791]: I1208 22:22:49.660336 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-kdlxz"] Dec 08 22:22:49 crc kubenswrapper[4791]: I1208 22:22:49.675151 4791 scope.go:117] "RemoveContainer" containerID="b408a22c5e0b8c9b40b070c3998b131545be6af2c687166bda440d8cf0c16d07" Dec 08 22:22:49 crc kubenswrapper[4791]: E1208 22:22:49.675676 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b408a22c5e0b8c9b40b070c3998b131545be6af2c687166bda440d8cf0c16d07\": container with ID starting with b408a22c5e0b8c9b40b070c3998b131545be6af2c687166bda440d8cf0c16d07 not found: ID does not exist" containerID="b408a22c5e0b8c9b40b070c3998b131545be6af2c687166bda440d8cf0c16d07" Dec 08 22:22:49 crc kubenswrapper[4791]: I1208 22:22:49.675843 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b408a22c5e0b8c9b40b070c3998b131545be6af2c687166bda440d8cf0c16d07"} err="failed to get container status \"b408a22c5e0b8c9b40b070c3998b131545be6af2c687166bda440d8cf0c16d07\": rpc error: code = NotFound desc = could not find container \"b408a22c5e0b8c9b40b070c3998b131545be6af2c687166bda440d8cf0c16d07\": container with ID starting with b408a22c5e0b8c9b40b070c3998b131545be6af2c687166bda440d8cf0c16d07 not found: ID does not exist" Dec 08 22:22:49 crc kubenswrapper[4791]: I1208 22:22:49.675880 4791 scope.go:117] "RemoveContainer" containerID="d4ebc52a319aad64c37e3f60ac8ee850c72681a5a10b77b27c60e4b11abda3f4" Dec 08 22:22:49 crc kubenswrapper[4791]: E1208 22:22:49.676227 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4ebc52a319aad64c37e3f60ac8ee850c72681a5a10b77b27c60e4b11abda3f4\": container with ID starting with d4ebc52a319aad64c37e3f60ac8ee850c72681a5a10b77b27c60e4b11abda3f4 not found: ID does not exist" containerID="d4ebc52a319aad64c37e3f60ac8ee850c72681a5a10b77b27c60e4b11abda3f4" Dec 08 22:22:49 crc kubenswrapper[4791]: I1208 22:22:49.676253 4791 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"d4ebc52a319aad64c37e3f60ac8ee850c72681a5a10b77b27c60e4b11abda3f4"} err="failed to get container status \"d4ebc52a319aad64c37e3f60ac8ee850c72681a5a10b77b27c60e4b11abda3f4\": rpc error: code = NotFound desc = could not find container \"d4ebc52a319aad64c37e3f60ac8ee850c72681a5a10b77b27c60e4b11abda3f4\": container with ID starting with d4ebc52a319aad64c37e3f60ac8ee850c72681a5a10b77b27c60e4b11abda3f4 not found: ID does not exist" Dec 08 22:22:49 crc kubenswrapper[4791]: I1208 22:22:49.676271 4791 scope.go:117] "RemoveContainer" containerID="1a0f7e3ced83e870fd9cf1a69703e6ba6b3a35815f2071e58a81cb0c50ea8861" Dec 08 22:22:49 crc kubenswrapper[4791]: E1208 22:22:49.676521 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a0f7e3ced83e870fd9cf1a69703e6ba6b3a35815f2071e58a81cb0c50ea8861\": container with ID starting with 1a0f7e3ced83e870fd9cf1a69703e6ba6b3a35815f2071e58a81cb0c50ea8861 not found: ID does not exist" containerID="1a0f7e3ced83e870fd9cf1a69703e6ba6b3a35815f2071e58a81cb0c50ea8861" Dec 08 22:22:49 crc kubenswrapper[4791]: I1208 22:22:49.676550 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a0f7e3ced83e870fd9cf1a69703e6ba6b3a35815f2071e58a81cb0c50ea8861"} err="failed to get container status \"1a0f7e3ced83e870fd9cf1a69703e6ba6b3a35815f2071e58a81cb0c50ea8861\": rpc error: code = NotFound desc = could not find container \"1a0f7e3ced83e870fd9cf1a69703e6ba6b3a35815f2071e58a81cb0c50ea8861\": container with ID starting with 1a0f7e3ced83e870fd9cf1a69703e6ba6b3a35815f2071e58a81cb0c50ea8861 not found: ID does not exist" Dec 08 22:22:49 crc kubenswrapper[4791]: I1208 22:22:49.976677 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-hccgz" Dec 08 22:22:49 crc kubenswrapper[4791]: I1208 22:22:49.977045 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-hccgz" Dec 08 22:22:50 crc kubenswrapper[4791]: I1208 22:22:50.588064 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xkbwz" event={"ID":"8234b10a-6f7f-4651-a230-b427dc587412","Type":"ContainerStarted","Data":"c25f2b5e2d348550dfca71c35c11ca3aebfe508484eb200934d43b8a830db0a5"} Dec 08 22:22:50 crc kubenswrapper[4791]: I1208 22:22:50.619769 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-xkbwz" podStartSLOduration=4.15575759 podStartE2EDuration="9.61972548s" podCreationTimestamp="2025-12-08 22:22:41 +0000 UTC" firstStartedPulling="2025-12-08 22:22:44.516238632 +0000 UTC m=+3841.214996977" lastFinishedPulling="2025-12-08 22:22:49.980206522 +0000 UTC m=+3846.678964867" observedRunningTime="2025-12-08 22:22:50.611405349 +0000 UTC m=+3847.310163694" watchObservedRunningTime="2025-12-08 22:22:50.61972548 +0000 UTC m=+3847.318483835" Dec 08 22:22:51 crc kubenswrapper[4791]: I1208 22:22:51.030447 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-hccgz" podUID="52797cda-77dc-4741-82f8-6fcdcb338995" containerName="registry-server" probeResult="failure" output=< Dec 08 22:22:51 crc kubenswrapper[4791]: timeout: failed to connect service ":50051" within 1s Dec 08 22:22:51 crc kubenswrapper[4791]: > Dec 08 22:22:51 crc kubenswrapper[4791]: I1208 22:22:51.639221 4791 kubelet_volumes.go:163] "Cleaned up 
orphaned pod volumes dir" podUID="06fc921b-5c3d-4d45-ba21-543cc622e482" path="/var/lib/kubelet/pods/06fc921b-5c3d-4d45-ba21-543cc622e482/volumes" Dec 08 22:22:52 crc kubenswrapper[4791]: I1208 22:22:52.343318 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-xkbwz" Dec 08 22:22:52 crc kubenswrapper[4791]: I1208 22:22:52.343908 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-xkbwz" Dec 08 22:22:53 crc kubenswrapper[4791]: I1208 22:22:53.393984 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-xkbwz" podUID="8234b10a-6f7f-4651-a230-b427dc587412" containerName="registry-server" probeResult="failure" output=< Dec 08 22:22:53 crc kubenswrapper[4791]: timeout: failed to connect service ":50051" within 1s Dec 08 22:22:53 crc kubenswrapper[4791]: > Dec 08 22:22:58 crc kubenswrapper[4791]: I1208 22:22:58.598221 4791 scope.go:117] "RemoveContainer" containerID="2e2170d7813848aafc86390ba56331b69fab25fa48d999b5edf9d163454cd903" Dec 08 22:22:58 crc kubenswrapper[4791]: E1208 22:22:58.599040 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:22:58 crc kubenswrapper[4791]: I1208 22:22:58.599195 4791 scope.go:117] "RemoveContainer" containerID="6d7e6e059fca5ac1250166fda020f57791fcbab8e8e2a0f76b741e5cf639a69d" Dec 08 22:22:58 crc kubenswrapper[4791]: E1208 22:22:58.599490 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:23:00 crc kubenswrapper[4791]: I1208 22:23:00.029762 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-hccgz" Dec 08 22:23:00 crc kubenswrapper[4791]: I1208 22:23:00.091041 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hccgz" Dec 08 22:23:00 crc kubenswrapper[4791]: I1208 22:23:00.267469 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hccgz"] Dec 08 22:23:01 crc kubenswrapper[4791]: I1208 22:23:01.702244 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-hccgz" podUID="52797cda-77dc-4741-82f8-6fcdcb338995" containerName="registry-server" containerID="cri-o://ccea17617fb4f873322d6a3ab951d605ccf374582ba5caf143a1ca6c3bc69811" gracePeriod=2 Dec 08 22:23:02 crc kubenswrapper[4791]: I1208 22:23:02.306660 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hccgz" Dec 08 22:23:02 crc kubenswrapper[4791]: I1208 22:23:02.385673 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52797cda-77dc-4741-82f8-6fcdcb338995-utilities\") pod \"52797cda-77dc-4741-82f8-6fcdcb338995\" (UID: \"52797cda-77dc-4741-82f8-6fcdcb338995\") " Dec 08 22:23:02 crc kubenswrapper[4791]: I1208 22:23:02.385982 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gk7gp\" (UniqueName: \"kubernetes.io/projected/52797cda-77dc-4741-82f8-6fcdcb338995-kube-api-access-gk7gp\") pod \"52797cda-77dc-4741-82f8-6fcdcb338995\" (UID: \"52797cda-77dc-4741-82f8-6fcdcb338995\") " Dec 08 22:23:02 crc kubenswrapper[4791]: I1208 22:23:02.386074 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52797cda-77dc-4741-82f8-6fcdcb338995-catalog-content\") pod \"52797cda-77dc-4741-82f8-6fcdcb338995\" (UID: \"52797cda-77dc-4741-82f8-6fcdcb338995\") " Dec 08 22:23:02 crc kubenswrapper[4791]: I1208 22:23:02.386686 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/52797cda-77dc-4741-82f8-6fcdcb338995-utilities" (OuterVolumeSpecName: "utilities") pod "52797cda-77dc-4741-82f8-6fcdcb338995" (UID: "52797cda-77dc-4741-82f8-6fcdcb338995"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:23:02 crc kubenswrapper[4791]: I1208 22:23:02.394394 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52797cda-77dc-4741-82f8-6fcdcb338995-kube-api-access-gk7gp" (OuterVolumeSpecName: "kube-api-access-gk7gp") pod "52797cda-77dc-4741-82f8-6fcdcb338995" (UID: "52797cda-77dc-4741-82f8-6fcdcb338995"). InnerVolumeSpecName "kube-api-access-gk7gp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 22:23:02 crc kubenswrapper[4791]: I1208 22:23:02.404506 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-xkbwz" Dec 08 22:23:02 crc kubenswrapper[4791]: I1208 22:23:02.432655 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/52797cda-77dc-4741-82f8-6fcdcb338995-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "52797cda-77dc-4741-82f8-6fcdcb338995" (UID: "52797cda-77dc-4741-82f8-6fcdcb338995"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:23:02 crc kubenswrapper[4791]: I1208 22:23:02.465503 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-xkbwz" Dec 08 22:23:02 crc kubenswrapper[4791]: I1208 22:23:02.492495 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52797cda-77dc-4741-82f8-6fcdcb338995-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 22:23:02 crc kubenswrapper[4791]: I1208 22:23:02.492549 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52797cda-77dc-4741-82f8-6fcdcb338995-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 22:23:02 crc kubenswrapper[4791]: I1208 22:23:02.492562 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gk7gp\" (UniqueName: \"kubernetes.io/projected/52797cda-77dc-4741-82f8-6fcdcb338995-kube-api-access-gk7gp\") on node \"crc\" DevicePath \"\"" Dec 08 22:23:02 crc kubenswrapper[4791]: I1208 22:23:02.713879 4791 generic.go:334] "Generic (PLEG): container finished" podID="52797cda-77dc-4741-82f8-6fcdcb338995" containerID="ccea17617fb4f873322d6a3ab951d605ccf374582ba5caf143a1ca6c3bc69811" exitCode=0 Dec 08 22:23:02 crc kubenswrapper[4791]: I1208 22:23:02.713969 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hccgz" Dec 08 22:23:02 crc kubenswrapper[4791]: I1208 22:23:02.713971 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hccgz" event={"ID":"52797cda-77dc-4741-82f8-6fcdcb338995","Type":"ContainerDied","Data":"ccea17617fb4f873322d6a3ab951d605ccf374582ba5caf143a1ca6c3bc69811"} Dec 08 22:23:02 crc kubenswrapper[4791]: I1208 22:23:02.714036 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hccgz" event={"ID":"52797cda-77dc-4741-82f8-6fcdcb338995","Type":"ContainerDied","Data":"0fa0d6216cbcd5b3b379eae6a2b918b1b985771f5bbd920850062eb96e0878a6"} Dec 08 22:23:02 crc kubenswrapper[4791]: I1208 22:23:02.714058 4791 scope.go:117] "RemoveContainer" containerID="ccea17617fb4f873322d6a3ab951d605ccf374582ba5caf143a1ca6c3bc69811" Dec 08 22:23:02 crc kubenswrapper[4791]: I1208 22:23:02.738082 4791 scope.go:117] "RemoveContainer" containerID="f10eff755b4cda9e3e9efdcc4b5a8d79af3a4f131770b31206b6996cf9cfc26f" Dec 08 22:23:02 crc kubenswrapper[4791]: I1208 22:23:02.755777 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hccgz"] Dec 08 22:23:02 crc kubenswrapper[4791]: I1208 22:23:02.765230 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-hccgz"] Dec 08 22:23:02 crc kubenswrapper[4791]: I1208 22:23:02.772938 4791 scope.go:117] "RemoveContainer" containerID="d7e770cc157921a96deb96a10bdd848b576b8c7a3e87bb40f09e64da3923db4b" Dec 08 22:23:02 crc kubenswrapper[4791]: I1208 22:23:02.840567 4791 scope.go:117] "RemoveContainer" containerID="ccea17617fb4f873322d6a3ab951d605ccf374582ba5caf143a1ca6c3bc69811" Dec 08 22:23:02 crc kubenswrapper[4791]: E1208 22:23:02.841210 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ccea17617fb4f873322d6a3ab951d605ccf374582ba5caf143a1ca6c3bc69811\": container with ID starting with 
ccea17617fb4f873322d6a3ab951d605ccf374582ba5caf143a1ca6c3bc69811 not found: ID does not exist" containerID="ccea17617fb4f873322d6a3ab951d605ccf374582ba5caf143a1ca6c3bc69811" Dec 08 22:23:02 crc kubenswrapper[4791]: I1208 22:23:02.841242 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ccea17617fb4f873322d6a3ab951d605ccf374582ba5caf143a1ca6c3bc69811"} err="failed to get container status \"ccea17617fb4f873322d6a3ab951d605ccf374582ba5caf143a1ca6c3bc69811\": rpc error: code = NotFound desc = could not find container \"ccea17617fb4f873322d6a3ab951d605ccf374582ba5caf143a1ca6c3bc69811\": container with ID starting with ccea17617fb4f873322d6a3ab951d605ccf374582ba5caf143a1ca6c3bc69811 not found: ID does not exist" Dec 08 22:23:02 crc kubenswrapper[4791]: I1208 22:23:02.841262 4791 scope.go:117] "RemoveContainer" containerID="f10eff755b4cda9e3e9efdcc4b5a8d79af3a4f131770b31206b6996cf9cfc26f" Dec 08 22:23:02 crc kubenswrapper[4791]: E1208 22:23:02.841524 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f10eff755b4cda9e3e9efdcc4b5a8d79af3a4f131770b31206b6996cf9cfc26f\": container with ID starting with f10eff755b4cda9e3e9efdcc4b5a8d79af3a4f131770b31206b6996cf9cfc26f not found: ID does not exist" containerID="f10eff755b4cda9e3e9efdcc4b5a8d79af3a4f131770b31206b6996cf9cfc26f" Dec 08 22:23:02 crc kubenswrapper[4791]: I1208 22:23:02.841553 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f10eff755b4cda9e3e9efdcc4b5a8d79af3a4f131770b31206b6996cf9cfc26f"} err="failed to get container status \"f10eff755b4cda9e3e9efdcc4b5a8d79af3a4f131770b31206b6996cf9cfc26f\": rpc error: code = NotFound desc = could not find container \"f10eff755b4cda9e3e9efdcc4b5a8d79af3a4f131770b31206b6996cf9cfc26f\": container with ID starting with f10eff755b4cda9e3e9efdcc4b5a8d79af3a4f131770b31206b6996cf9cfc26f not found: ID does not exist" Dec 08 22:23:02 crc kubenswrapper[4791]: I1208 22:23:02.841568 4791 scope.go:117] "RemoveContainer" containerID="d7e770cc157921a96deb96a10bdd848b576b8c7a3e87bb40f09e64da3923db4b" Dec 08 22:23:02 crc kubenswrapper[4791]: E1208 22:23:02.841875 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d7e770cc157921a96deb96a10bdd848b576b8c7a3e87bb40f09e64da3923db4b\": container with ID starting with d7e770cc157921a96deb96a10bdd848b576b8c7a3e87bb40f09e64da3923db4b not found: ID does not exist" containerID="d7e770cc157921a96deb96a10bdd848b576b8c7a3e87bb40f09e64da3923db4b" Dec 08 22:23:02 crc kubenswrapper[4791]: I1208 22:23:02.841900 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7e770cc157921a96deb96a10bdd848b576b8c7a3e87bb40f09e64da3923db4b"} err="failed to get container status \"d7e770cc157921a96deb96a10bdd848b576b8c7a3e87bb40f09e64da3923db4b\": rpc error: code = NotFound desc = could not find container \"d7e770cc157921a96deb96a10bdd848b576b8c7a3e87bb40f09e64da3923db4b\": container with ID starting with d7e770cc157921a96deb96a10bdd848b576b8c7a3e87bb40f09e64da3923db4b not found: ID does not exist" Dec 08 22:23:03 crc kubenswrapper[4791]: I1208 22:23:03.613583 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="52797cda-77dc-4741-82f8-6fcdcb338995" path="/var/lib/kubelet/pods/52797cda-77dc-4741-82f8-6fcdcb338995/volumes" Dec 08 22:23:04 crc kubenswrapper[4791]: I1208 22:23:04.469228 
4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xkbwz"] Dec 08 22:23:04 crc kubenswrapper[4791]: I1208 22:23:04.469565 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-xkbwz" podUID="8234b10a-6f7f-4651-a230-b427dc587412" containerName="registry-server" containerID="cri-o://c25f2b5e2d348550dfca71c35c11ca3aebfe508484eb200934d43b8a830db0a5" gracePeriod=2 Dec 08 22:23:04 crc kubenswrapper[4791]: I1208 22:23:04.750324 4791 generic.go:334] "Generic (PLEG): container finished" podID="8234b10a-6f7f-4651-a230-b427dc587412" containerID="c25f2b5e2d348550dfca71c35c11ca3aebfe508484eb200934d43b8a830db0a5" exitCode=0 Dec 08 22:23:04 crc kubenswrapper[4791]: I1208 22:23:04.750653 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xkbwz" event={"ID":"8234b10a-6f7f-4651-a230-b427dc587412","Type":"ContainerDied","Data":"c25f2b5e2d348550dfca71c35c11ca3aebfe508484eb200934d43b8a830db0a5"} Dec 08 22:23:05 crc kubenswrapper[4791]: I1208 22:23:05.303253 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xkbwz" Dec 08 22:23:05 crc kubenswrapper[4791]: I1208 22:23:05.485315 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8234b10a-6f7f-4651-a230-b427dc587412-catalog-content\") pod \"8234b10a-6f7f-4651-a230-b427dc587412\" (UID: \"8234b10a-6f7f-4651-a230-b427dc587412\") " Dec 08 22:23:05 crc kubenswrapper[4791]: I1208 22:23:05.485678 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xsh4c\" (UniqueName: \"kubernetes.io/projected/8234b10a-6f7f-4651-a230-b427dc587412-kube-api-access-xsh4c\") pod \"8234b10a-6f7f-4651-a230-b427dc587412\" (UID: \"8234b10a-6f7f-4651-a230-b427dc587412\") " Dec 08 22:23:05 crc kubenswrapper[4791]: I1208 22:23:05.485904 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8234b10a-6f7f-4651-a230-b427dc587412-utilities\") pod \"8234b10a-6f7f-4651-a230-b427dc587412\" (UID: \"8234b10a-6f7f-4651-a230-b427dc587412\") " Dec 08 22:23:05 crc kubenswrapper[4791]: I1208 22:23:05.486852 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8234b10a-6f7f-4651-a230-b427dc587412-utilities" (OuterVolumeSpecName: "utilities") pod "8234b10a-6f7f-4651-a230-b427dc587412" (UID: "8234b10a-6f7f-4651-a230-b427dc587412"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:23:05 crc kubenswrapper[4791]: I1208 22:23:05.490743 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8234b10a-6f7f-4651-a230-b427dc587412-kube-api-access-xsh4c" (OuterVolumeSpecName: "kube-api-access-xsh4c") pod "8234b10a-6f7f-4651-a230-b427dc587412" (UID: "8234b10a-6f7f-4651-a230-b427dc587412"). InnerVolumeSpecName "kube-api-access-xsh4c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 22:23:05 crc kubenswrapper[4791]: I1208 22:23:05.589926 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8234b10a-6f7f-4651-a230-b427dc587412-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 22:23:05 crc kubenswrapper[4791]: I1208 22:23:05.589980 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xsh4c\" (UniqueName: \"kubernetes.io/projected/8234b10a-6f7f-4651-a230-b427dc587412-kube-api-access-xsh4c\") on node \"crc\" DevicePath \"\"" Dec 08 22:23:05 crc kubenswrapper[4791]: I1208 22:23:05.593999 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8234b10a-6f7f-4651-a230-b427dc587412-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8234b10a-6f7f-4651-a230-b427dc587412" (UID: "8234b10a-6f7f-4651-a230-b427dc587412"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:23:05 crc kubenswrapper[4791]: I1208 22:23:05.692374 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8234b10a-6f7f-4651-a230-b427dc587412-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 22:23:05 crc kubenswrapper[4791]: I1208 22:23:05.767923 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xkbwz" event={"ID":"8234b10a-6f7f-4651-a230-b427dc587412","Type":"ContainerDied","Data":"ad1884d0cbefacb01c59f9c34483ca1b02ca6b5337290014c124a4d0a2cfb168"} Dec 08 22:23:05 crc kubenswrapper[4791]: I1208 22:23:05.768017 4791 scope.go:117] "RemoveContainer" containerID="c25f2b5e2d348550dfca71c35c11ca3aebfe508484eb200934d43b8a830db0a5" Dec 08 22:23:05 crc kubenswrapper[4791]: I1208 22:23:05.768268 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-xkbwz" Dec 08 22:23:05 crc kubenswrapper[4791]: I1208 22:23:05.811367 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xkbwz"] Dec 08 22:23:05 crc kubenswrapper[4791]: I1208 22:23:05.813419 4791 scope.go:117] "RemoveContainer" containerID="adcab5380698f4caf9b4f92d11e742d6c0d7f2bc8f1145498706826d7eef5e1b" Dec 08 22:23:05 crc kubenswrapper[4791]: I1208 22:23:05.856212 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-xkbwz"] Dec 08 22:23:05 crc kubenswrapper[4791]: I1208 22:23:05.870801 4791 scope.go:117] "RemoveContainer" containerID="b18df498b307a63cdc01ca98266ee4ddb8cba0a82cbee2cb5e22a1a352d6a913" Dec 08 22:23:07 crc kubenswrapper[4791]: I1208 22:23:07.611475 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8234b10a-6f7f-4651-a230-b427dc587412" path="/var/lib/kubelet/pods/8234b10a-6f7f-4651-a230-b427dc587412/volumes" Dec 08 22:23:09 crc kubenswrapper[4791]: I1208 22:23:09.598566 4791 scope.go:117] "RemoveContainer" containerID="6d7e6e059fca5ac1250166fda020f57791fcbab8e8e2a0f76b741e5cf639a69d" Dec 08 22:23:09 crc kubenswrapper[4791]: E1208 22:23:09.600007 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:23:10 crc kubenswrapper[4791]: I1208 22:23:10.599011 4791 scope.go:117] "RemoveContainer" containerID="2e2170d7813848aafc86390ba56331b69fab25fa48d999b5edf9d163454cd903" Dec 08 22:23:10 crc kubenswrapper[4791]: E1208 22:23:10.600411 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:23:23 crc kubenswrapper[4791]: I1208 22:23:23.605151 4791 scope.go:117] "RemoveContainer" containerID="6d7e6e059fca5ac1250166fda020f57791fcbab8e8e2a0f76b741e5cf639a69d" Dec 08 22:23:23 crc kubenswrapper[4791]: E1208 22:23:23.605931 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:23:24 crc kubenswrapper[4791]: I1208 22:23:24.598251 4791 scope.go:117] "RemoveContainer" containerID="2e2170d7813848aafc86390ba56331b69fab25fa48d999b5edf9d163454cd903" Dec 08 22:23:24 crc kubenswrapper[4791]: E1208 22:23:24.599018 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:23:34 crc kubenswrapper[4791]: I1208 22:23:34.597636 4791 scope.go:117] "RemoveContainer" containerID="6d7e6e059fca5ac1250166fda020f57791fcbab8e8e2a0f76b741e5cf639a69d" Dec 08 22:23:35 crc kubenswrapper[4791]: I1208 22:23:35.094898 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" event={"ID":"bcd8d669-4a40-401d-af99-651b840fb48b","Type":"ContainerStarted","Data":"d9fa01ccb8c396c7c4d9b876214dcbc0da45e1478887955f6dafccca5f480141"} Dec 08 22:23:35 crc kubenswrapper[4791]: I1208 22:23:35.095328 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 22:23:39 crc kubenswrapper[4791]: I1208 22:23:39.598498 4791 scope.go:117] "RemoveContainer" containerID="2e2170d7813848aafc86390ba56331b69fab25fa48d999b5edf9d163454cd903" Dec 08 22:23:39 crc kubenswrapper[4791]: E1208 22:23:39.599259 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:23:45 crc kubenswrapper[4791]: I1208 22:23:45.886410 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 22:23:53 crc kubenswrapper[4791]: I1208 22:23:53.604773 4791 scope.go:117] "RemoveContainer" containerID="2e2170d7813848aafc86390ba56331b69fab25fa48d999b5edf9d163454cd903" Dec 08 22:23:53 crc kubenswrapper[4791]: E1208 22:23:53.605939 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:24:06 crc kubenswrapper[4791]: I1208 22:24:06.599546 4791 scope.go:117] "RemoveContainer" containerID="2e2170d7813848aafc86390ba56331b69fab25fa48d999b5edf9d163454cd903" Dec 08 22:24:06 crc kubenswrapper[4791]: E1208 22:24:06.600403 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:24:21 crc kubenswrapper[4791]: I1208 22:24:21.597974 4791 scope.go:117] "RemoveContainer" containerID="2e2170d7813848aafc86390ba56331b69fab25fa48d999b5edf9d163454cd903" Dec 08 22:24:21 crc kubenswrapper[4791]: E1208 22:24:21.598741 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:24:34 crc kubenswrapper[4791]: I1208 22:24:34.598419 4791 scope.go:117] "RemoveContainer" containerID="2e2170d7813848aafc86390ba56331b69fab25fa48d999b5edf9d163454cd903" Dec 08 22:24:34 crc kubenswrapper[4791]: E1208 22:24:34.599197 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:24:48 crc kubenswrapper[4791]: I1208 22:24:48.598372 4791 scope.go:117] "RemoveContainer" containerID="2e2170d7813848aafc86390ba56331b69fab25fa48d999b5edf9d163454cd903" Dec 08 22:24:48 crc kubenswrapper[4791]: E1208 22:24:48.599274 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:24:59 crc kubenswrapper[4791]: I1208 22:24:59.597993 4791 scope.go:117] "RemoveContainer" containerID="2e2170d7813848aafc86390ba56331b69fab25fa48d999b5edf9d163454cd903" Dec 08 22:24:59 crc kubenswrapper[4791]: E1208 22:24:59.598833 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:25:13 crc kubenswrapper[4791]: I1208 22:25:13.613228 4791 scope.go:117] "RemoveContainer" containerID="2e2170d7813848aafc86390ba56331b69fab25fa48d999b5edf9d163454cd903" Dec 08 22:25:14 crc kubenswrapper[4791]: I1208 22:25:14.139171 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" event={"ID":"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d","Type":"ContainerStarted","Data":"56ea5ba8f39b94d8e41ac61700e1825df023f29cb0f944779c6114033e9a7f0f"} Dec 08 22:26:04 crc kubenswrapper[4791]: I1208 22:26:04.003379 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-4srgq"] Dec 08 22:26:04 crc kubenswrapper[4791]: E1208 22:26:04.004622 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8234b10a-6f7f-4651-a230-b427dc587412" containerName="extract-utilities" Dec 08 22:26:04 crc kubenswrapper[4791]: I1208 22:26:04.004640 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="8234b10a-6f7f-4651-a230-b427dc587412" containerName="extract-utilities" Dec 08 22:26:04 crc kubenswrapper[4791]: E1208 22:26:04.004653 4791 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="52797cda-77dc-4741-82f8-6fcdcb338995" containerName="extract-content" Dec 08 22:26:04 crc kubenswrapper[4791]: I1208 22:26:04.004661 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="52797cda-77dc-4741-82f8-6fcdcb338995" containerName="extract-content" Dec 08 22:26:04 crc kubenswrapper[4791]: E1208 22:26:04.004675 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8234b10a-6f7f-4651-a230-b427dc587412" containerName="extract-content" Dec 08 22:26:04 crc kubenswrapper[4791]: I1208 22:26:04.004683 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="8234b10a-6f7f-4651-a230-b427dc587412" containerName="extract-content" Dec 08 22:26:04 crc kubenswrapper[4791]: E1208 22:26:04.004750 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06fc921b-5c3d-4d45-ba21-543cc622e482" containerName="registry-server" Dec 08 22:26:04 crc kubenswrapper[4791]: I1208 22:26:04.004759 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="06fc921b-5c3d-4d45-ba21-543cc622e482" containerName="registry-server" Dec 08 22:26:04 crc kubenswrapper[4791]: E1208 22:26:04.004781 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8234b10a-6f7f-4651-a230-b427dc587412" containerName="registry-server" Dec 08 22:26:04 crc kubenswrapper[4791]: I1208 22:26:04.004788 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="8234b10a-6f7f-4651-a230-b427dc587412" containerName="registry-server" Dec 08 22:26:04 crc kubenswrapper[4791]: E1208 22:26:04.004807 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06fc921b-5c3d-4d45-ba21-543cc622e482" containerName="extract-utilities" Dec 08 22:26:04 crc kubenswrapper[4791]: I1208 22:26:04.004817 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="06fc921b-5c3d-4d45-ba21-543cc622e482" containerName="extract-utilities" Dec 08 22:26:04 crc kubenswrapper[4791]: E1208 22:26:04.004841 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06fc921b-5c3d-4d45-ba21-543cc622e482" containerName="extract-content" Dec 08 22:26:04 crc kubenswrapper[4791]: I1208 22:26:04.004848 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="06fc921b-5c3d-4d45-ba21-543cc622e482" containerName="extract-content" Dec 08 22:26:04 crc kubenswrapper[4791]: E1208 22:26:04.004861 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52797cda-77dc-4741-82f8-6fcdcb338995" containerName="extract-utilities" Dec 08 22:26:04 crc kubenswrapper[4791]: I1208 22:26:04.004868 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="52797cda-77dc-4741-82f8-6fcdcb338995" containerName="extract-utilities" Dec 08 22:26:04 crc kubenswrapper[4791]: E1208 22:26:04.004878 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52797cda-77dc-4741-82f8-6fcdcb338995" containerName="registry-server" Dec 08 22:26:04 crc kubenswrapper[4791]: I1208 22:26:04.004885 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="52797cda-77dc-4741-82f8-6fcdcb338995" containerName="registry-server" Dec 08 22:26:04 crc kubenswrapper[4791]: I1208 22:26:04.005160 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="8234b10a-6f7f-4651-a230-b427dc587412" containerName="registry-server" Dec 08 22:26:04 crc kubenswrapper[4791]: I1208 22:26:04.005185 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="06fc921b-5c3d-4d45-ba21-543cc622e482" containerName="registry-server" Dec 08 22:26:04 crc kubenswrapper[4791]: I1208 22:26:04.005200 4791 
memory_manager.go:354] "RemoveStaleState removing state" podUID="52797cda-77dc-4741-82f8-6fcdcb338995" containerName="registry-server" Dec 08 22:26:04 crc kubenswrapper[4791]: I1208 22:26:04.007331 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4srgq" Dec 08 22:26:04 crc kubenswrapper[4791]: I1208 22:26:04.015437 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4srgq"] Dec 08 22:26:04 crc kubenswrapper[4791]: I1208 22:26:04.135613 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d48e79d4-f8b6-4076-87cb-45dd06c8c926-utilities\") pod \"community-operators-4srgq\" (UID: \"d48e79d4-f8b6-4076-87cb-45dd06c8c926\") " pod="openshift-marketplace/community-operators-4srgq" Dec 08 22:26:04 crc kubenswrapper[4791]: I1208 22:26:04.135811 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thp9l\" (UniqueName: \"kubernetes.io/projected/d48e79d4-f8b6-4076-87cb-45dd06c8c926-kube-api-access-thp9l\") pod \"community-operators-4srgq\" (UID: \"d48e79d4-f8b6-4076-87cb-45dd06c8c926\") " pod="openshift-marketplace/community-operators-4srgq" Dec 08 22:26:04 crc kubenswrapper[4791]: I1208 22:26:04.135874 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d48e79d4-f8b6-4076-87cb-45dd06c8c926-catalog-content\") pod \"community-operators-4srgq\" (UID: \"d48e79d4-f8b6-4076-87cb-45dd06c8c926\") " pod="openshift-marketplace/community-operators-4srgq" Dec 08 22:26:04 crc kubenswrapper[4791]: I1208 22:26:04.238088 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d48e79d4-f8b6-4076-87cb-45dd06c8c926-catalog-content\") pod \"community-operators-4srgq\" (UID: \"d48e79d4-f8b6-4076-87cb-45dd06c8c926\") " pod="openshift-marketplace/community-operators-4srgq" Dec 08 22:26:04 crc kubenswrapper[4791]: I1208 22:26:04.238265 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d48e79d4-f8b6-4076-87cb-45dd06c8c926-utilities\") pod \"community-operators-4srgq\" (UID: \"d48e79d4-f8b6-4076-87cb-45dd06c8c926\") " pod="openshift-marketplace/community-operators-4srgq" Dec 08 22:26:04 crc kubenswrapper[4791]: I1208 22:26:04.238347 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-thp9l\" (UniqueName: \"kubernetes.io/projected/d48e79d4-f8b6-4076-87cb-45dd06c8c926-kube-api-access-thp9l\") pod \"community-operators-4srgq\" (UID: \"d48e79d4-f8b6-4076-87cb-45dd06c8c926\") " pod="openshift-marketplace/community-operators-4srgq" Dec 08 22:26:04 crc kubenswrapper[4791]: I1208 22:26:04.238751 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d48e79d4-f8b6-4076-87cb-45dd06c8c926-catalog-content\") pod \"community-operators-4srgq\" (UID: \"d48e79d4-f8b6-4076-87cb-45dd06c8c926\") " pod="openshift-marketplace/community-operators-4srgq" Dec 08 22:26:04 crc kubenswrapper[4791]: I1208 22:26:04.238970 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/d48e79d4-f8b6-4076-87cb-45dd06c8c926-utilities\") pod \"community-operators-4srgq\" (UID: \"d48e79d4-f8b6-4076-87cb-45dd06c8c926\") " pod="openshift-marketplace/community-operators-4srgq" Dec 08 22:26:04 crc kubenswrapper[4791]: I1208 22:26:04.257567 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-thp9l\" (UniqueName: \"kubernetes.io/projected/d48e79d4-f8b6-4076-87cb-45dd06c8c926-kube-api-access-thp9l\") pod \"community-operators-4srgq\" (UID: \"d48e79d4-f8b6-4076-87cb-45dd06c8c926\") " pod="openshift-marketplace/community-operators-4srgq" Dec 08 22:26:04 crc kubenswrapper[4791]: I1208 22:26:04.339374 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4srgq" Dec 08 22:26:04 crc kubenswrapper[4791]: I1208 22:26:04.826468 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4srgq"] Dec 08 22:26:05 crc kubenswrapper[4791]: I1208 22:26:05.684489 4791 generic.go:334] "Generic (PLEG): container finished" podID="d48e79d4-f8b6-4076-87cb-45dd06c8c926" containerID="439590787bcc98e5dd62dfacc78ac1478cc711874ec77e6ed73bf5e295033672" exitCode=0 Dec 08 22:26:05 crc kubenswrapper[4791]: I1208 22:26:05.685218 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4srgq" event={"ID":"d48e79d4-f8b6-4076-87cb-45dd06c8c926","Type":"ContainerDied","Data":"439590787bcc98e5dd62dfacc78ac1478cc711874ec77e6ed73bf5e295033672"} Dec 08 22:26:05 crc kubenswrapper[4791]: I1208 22:26:05.685764 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4srgq" event={"ID":"d48e79d4-f8b6-4076-87cb-45dd06c8c926","Type":"ContainerStarted","Data":"d071809bc245cae4b62c81d1d5e7e2cf925b4cca9751320d3f0aaea536300056"} Dec 08 22:26:06 crc kubenswrapper[4791]: I1208 22:26:06.697320 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4srgq" event={"ID":"d48e79d4-f8b6-4076-87cb-45dd06c8c926","Type":"ContainerStarted","Data":"74d8713d898770b368315e08ecd058ca06ab2f531238bcc015c47762d79f3bb0"} Dec 08 22:26:07 crc kubenswrapper[4791]: I1208 22:26:07.708186 4791 generic.go:334] "Generic (PLEG): container finished" podID="bcd8d669-4a40-401d-af99-651b840fb48b" containerID="d9fa01ccb8c396c7c4d9b876214dcbc0da45e1478887955f6dafccca5f480141" exitCode=1 Dec 08 22:26:07 crc kubenswrapper[4791]: I1208 22:26:07.708260 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" event={"ID":"bcd8d669-4a40-401d-af99-651b840fb48b","Type":"ContainerDied","Data":"d9fa01ccb8c396c7c4d9b876214dcbc0da45e1478887955f6dafccca5f480141"} Dec 08 22:26:07 crc kubenswrapper[4791]: I1208 22:26:07.708519 4791 scope.go:117] "RemoveContainer" containerID="6d7e6e059fca5ac1250166fda020f57791fcbab8e8e2a0f76b741e5cf639a69d" Dec 08 22:26:07 crc kubenswrapper[4791]: I1208 22:26:07.709263 4791 scope.go:117] "RemoveContainer" containerID="d9fa01ccb8c396c7c4d9b876214dcbc0da45e1478887955f6dafccca5f480141" Dec 08 22:26:07 crc kubenswrapper[4791]: E1208 22:26:07.709513 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" 
pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:26:07 crc kubenswrapper[4791]: I1208 22:26:07.710400 4791 generic.go:334] "Generic (PLEG): container finished" podID="d48e79d4-f8b6-4076-87cb-45dd06c8c926" containerID="74d8713d898770b368315e08ecd058ca06ab2f531238bcc015c47762d79f3bb0" exitCode=0 Dec 08 22:26:07 crc kubenswrapper[4791]: I1208 22:26:07.710457 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4srgq" event={"ID":"d48e79d4-f8b6-4076-87cb-45dd06c8c926","Type":"ContainerDied","Data":"74d8713d898770b368315e08ecd058ca06ab2f531238bcc015c47762d79f3bb0"} Dec 08 22:26:09 crc kubenswrapper[4791]: I1208 22:26:09.733226 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4srgq" event={"ID":"d48e79d4-f8b6-4076-87cb-45dd06c8c926","Type":"ContainerStarted","Data":"f955acc34a4a08d5d4dca845e5358ec942cbd78fa6c0d031bb51a63c17b5e28b"} Dec 08 22:26:09 crc kubenswrapper[4791]: I1208 22:26:09.763415 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-4srgq" podStartSLOduration=4.104203392 podStartE2EDuration="6.763391958s" podCreationTimestamp="2025-12-08 22:26:03 +0000 UTC" firstStartedPulling="2025-12-08 22:26:05.686350534 +0000 UTC m=+4042.385108889" lastFinishedPulling="2025-12-08 22:26:08.34553911 +0000 UTC m=+4045.044297455" observedRunningTime="2025-12-08 22:26:09.750465934 +0000 UTC m=+4046.449224279" watchObservedRunningTime="2025-12-08 22:26:09.763391958 +0000 UTC m=+4046.462150303" Dec 08 22:26:14 crc kubenswrapper[4791]: I1208 22:26:14.339986 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-4srgq" Dec 08 22:26:14 crc kubenswrapper[4791]: I1208 22:26:14.340481 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-4srgq" Dec 08 22:26:14 crc kubenswrapper[4791]: I1208 22:26:14.387410 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-4srgq" Dec 08 22:26:14 crc kubenswrapper[4791]: I1208 22:26:14.840980 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-4srgq" Dec 08 22:26:15 crc kubenswrapper[4791]: I1208 22:26:15.884100 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 22:26:15 crc kubenswrapper[4791]: I1208 22:26:15.885505 4791 scope.go:117] "RemoveContainer" containerID="d9fa01ccb8c396c7c4d9b876214dcbc0da45e1478887955f6dafccca5f480141" Dec 08 22:26:15 crc kubenswrapper[4791]: E1208 22:26:15.885841 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:26:18 crc kubenswrapper[4791]: I1208 22:26:18.190890 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4srgq"] Dec 08 22:26:18 crc kubenswrapper[4791]: I1208 22:26:18.191886 4791 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-4srgq" podUID="d48e79d4-f8b6-4076-87cb-45dd06c8c926" containerName="registry-server" containerID="cri-o://f955acc34a4a08d5d4dca845e5358ec942cbd78fa6c0d031bb51a63c17b5e28b" gracePeriod=2 Dec 08 22:26:18 crc kubenswrapper[4791]: I1208 22:26:18.720743 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4srgq" Dec 08 22:26:18 crc kubenswrapper[4791]: I1208 22:26:18.797527 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d48e79d4-f8b6-4076-87cb-45dd06c8c926-utilities\") pod \"d48e79d4-f8b6-4076-87cb-45dd06c8c926\" (UID: \"d48e79d4-f8b6-4076-87cb-45dd06c8c926\") " Dec 08 22:26:18 crc kubenswrapper[4791]: I1208 22:26:18.797607 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d48e79d4-f8b6-4076-87cb-45dd06c8c926-catalog-content\") pod \"d48e79d4-f8b6-4076-87cb-45dd06c8c926\" (UID: \"d48e79d4-f8b6-4076-87cb-45dd06c8c926\") " Dec 08 22:26:18 crc kubenswrapper[4791]: I1208 22:26:18.797661 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-thp9l\" (UniqueName: \"kubernetes.io/projected/d48e79d4-f8b6-4076-87cb-45dd06c8c926-kube-api-access-thp9l\") pod \"d48e79d4-f8b6-4076-87cb-45dd06c8c926\" (UID: \"d48e79d4-f8b6-4076-87cb-45dd06c8c926\") " Dec 08 22:26:18 crc kubenswrapper[4791]: I1208 22:26:18.798881 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d48e79d4-f8b6-4076-87cb-45dd06c8c926-utilities" (OuterVolumeSpecName: "utilities") pod "d48e79d4-f8b6-4076-87cb-45dd06c8c926" (UID: "d48e79d4-f8b6-4076-87cb-45dd06c8c926"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:26:18 crc kubenswrapper[4791]: I1208 22:26:18.814548 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d48e79d4-f8b6-4076-87cb-45dd06c8c926-kube-api-access-thp9l" (OuterVolumeSpecName: "kube-api-access-thp9l") pod "d48e79d4-f8b6-4076-87cb-45dd06c8c926" (UID: "d48e79d4-f8b6-4076-87cb-45dd06c8c926"). InnerVolumeSpecName "kube-api-access-thp9l". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 22:26:18 crc kubenswrapper[4791]: I1208 22:26:18.842329 4791 generic.go:334] "Generic (PLEG): container finished" podID="d48e79d4-f8b6-4076-87cb-45dd06c8c926" containerID="f955acc34a4a08d5d4dca845e5358ec942cbd78fa6c0d031bb51a63c17b5e28b" exitCode=0 Dec 08 22:26:18 crc kubenswrapper[4791]: I1208 22:26:18.842381 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4srgq" event={"ID":"d48e79d4-f8b6-4076-87cb-45dd06c8c926","Type":"ContainerDied","Data":"f955acc34a4a08d5d4dca845e5358ec942cbd78fa6c0d031bb51a63c17b5e28b"} Dec 08 22:26:18 crc kubenswrapper[4791]: I1208 22:26:18.842415 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4srgq" event={"ID":"d48e79d4-f8b6-4076-87cb-45dd06c8c926","Type":"ContainerDied","Data":"d071809bc245cae4b62c81d1d5e7e2cf925b4cca9751320d3f0aaea536300056"} Dec 08 22:26:18 crc kubenswrapper[4791]: I1208 22:26:18.842433 4791 scope.go:117] "RemoveContainer" containerID="f955acc34a4a08d5d4dca845e5358ec942cbd78fa6c0d031bb51a63c17b5e28b" Dec 08 22:26:18 crc kubenswrapper[4791]: I1208 22:26:18.842619 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4srgq" Dec 08 22:26:18 crc kubenswrapper[4791]: I1208 22:26:18.863266 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d48e79d4-f8b6-4076-87cb-45dd06c8c926-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d48e79d4-f8b6-4076-87cb-45dd06c8c926" (UID: "d48e79d4-f8b6-4076-87cb-45dd06c8c926"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:26:18 crc kubenswrapper[4791]: I1208 22:26:18.879072 4791 scope.go:117] "RemoveContainer" containerID="74d8713d898770b368315e08ecd058ca06ab2f531238bcc015c47762d79f3bb0" Dec 08 22:26:18 crc kubenswrapper[4791]: I1208 22:26:18.900505 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d48e79d4-f8b6-4076-87cb-45dd06c8c926-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 22:26:18 crc kubenswrapper[4791]: I1208 22:26:18.900537 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d48e79d4-f8b6-4076-87cb-45dd06c8c926-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 22:26:18 crc kubenswrapper[4791]: I1208 22:26:18.900550 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-thp9l\" (UniqueName: \"kubernetes.io/projected/d48e79d4-f8b6-4076-87cb-45dd06c8c926-kube-api-access-thp9l\") on node \"crc\" DevicePath \"\"" Dec 08 22:26:18 crc kubenswrapper[4791]: I1208 22:26:18.904404 4791 scope.go:117] "RemoveContainer" containerID="439590787bcc98e5dd62dfacc78ac1478cc711874ec77e6ed73bf5e295033672" Dec 08 22:26:18 crc kubenswrapper[4791]: I1208 22:26:18.955816 4791 scope.go:117] "RemoveContainer" containerID="f955acc34a4a08d5d4dca845e5358ec942cbd78fa6c0d031bb51a63c17b5e28b" Dec 08 22:26:18 crc kubenswrapper[4791]: E1208 22:26:18.956293 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f955acc34a4a08d5d4dca845e5358ec942cbd78fa6c0d031bb51a63c17b5e28b\": container with ID starting with f955acc34a4a08d5d4dca845e5358ec942cbd78fa6c0d031bb51a63c17b5e28b not found: ID does not exist" 
containerID="f955acc34a4a08d5d4dca845e5358ec942cbd78fa6c0d031bb51a63c17b5e28b" Dec 08 22:26:18 crc kubenswrapper[4791]: I1208 22:26:18.956402 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f955acc34a4a08d5d4dca845e5358ec942cbd78fa6c0d031bb51a63c17b5e28b"} err="failed to get container status \"f955acc34a4a08d5d4dca845e5358ec942cbd78fa6c0d031bb51a63c17b5e28b\": rpc error: code = NotFound desc = could not find container \"f955acc34a4a08d5d4dca845e5358ec942cbd78fa6c0d031bb51a63c17b5e28b\": container with ID starting with f955acc34a4a08d5d4dca845e5358ec942cbd78fa6c0d031bb51a63c17b5e28b not found: ID does not exist" Dec 08 22:26:18 crc kubenswrapper[4791]: I1208 22:26:18.956489 4791 scope.go:117] "RemoveContainer" containerID="74d8713d898770b368315e08ecd058ca06ab2f531238bcc015c47762d79f3bb0" Dec 08 22:26:18 crc kubenswrapper[4791]: E1208 22:26:18.956984 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"74d8713d898770b368315e08ecd058ca06ab2f531238bcc015c47762d79f3bb0\": container with ID starting with 74d8713d898770b368315e08ecd058ca06ab2f531238bcc015c47762d79f3bb0 not found: ID does not exist" containerID="74d8713d898770b368315e08ecd058ca06ab2f531238bcc015c47762d79f3bb0" Dec 08 22:26:18 crc kubenswrapper[4791]: I1208 22:26:18.957115 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"74d8713d898770b368315e08ecd058ca06ab2f531238bcc015c47762d79f3bb0"} err="failed to get container status \"74d8713d898770b368315e08ecd058ca06ab2f531238bcc015c47762d79f3bb0\": rpc error: code = NotFound desc = could not find container \"74d8713d898770b368315e08ecd058ca06ab2f531238bcc015c47762d79f3bb0\": container with ID starting with 74d8713d898770b368315e08ecd058ca06ab2f531238bcc015c47762d79f3bb0 not found: ID does not exist" Dec 08 22:26:18 crc kubenswrapper[4791]: I1208 22:26:18.957194 4791 scope.go:117] "RemoveContainer" containerID="439590787bcc98e5dd62dfacc78ac1478cc711874ec77e6ed73bf5e295033672" Dec 08 22:26:18 crc kubenswrapper[4791]: E1208 22:26:18.957581 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"439590787bcc98e5dd62dfacc78ac1478cc711874ec77e6ed73bf5e295033672\": container with ID starting with 439590787bcc98e5dd62dfacc78ac1478cc711874ec77e6ed73bf5e295033672 not found: ID does not exist" containerID="439590787bcc98e5dd62dfacc78ac1478cc711874ec77e6ed73bf5e295033672" Dec 08 22:26:18 crc kubenswrapper[4791]: I1208 22:26:18.957610 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"439590787bcc98e5dd62dfacc78ac1478cc711874ec77e6ed73bf5e295033672"} err="failed to get container status \"439590787bcc98e5dd62dfacc78ac1478cc711874ec77e6ed73bf5e295033672\": rpc error: code = NotFound desc = could not find container \"439590787bcc98e5dd62dfacc78ac1478cc711874ec77e6ed73bf5e295033672\": container with ID starting with 439590787bcc98e5dd62dfacc78ac1478cc711874ec77e6ed73bf5e295033672 not found: ID does not exist" Dec 08 22:26:19 crc kubenswrapper[4791]: I1208 22:26:19.178641 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4srgq"] Dec 08 22:26:19 crc kubenswrapper[4791]: I1208 22:26:19.188335 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-4srgq"] Dec 08 22:26:19 crc kubenswrapper[4791]: I1208 22:26:19.611016 
4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d48e79d4-f8b6-4076-87cb-45dd06c8c926" path="/var/lib/kubelet/pods/d48e79d4-f8b6-4076-87cb-45dd06c8c926/volumes" Dec 08 22:26:25 crc kubenswrapper[4791]: I1208 22:26:25.884284 4791 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 22:26:25 crc kubenswrapper[4791]: I1208 22:26:25.885660 4791 scope.go:117] "RemoveContainer" containerID="d9fa01ccb8c396c7c4d9b876214dcbc0da45e1478887955f6dafccca5f480141" Dec 08 22:26:25 crc kubenswrapper[4791]: E1208 22:26:25.886008 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:26:39 crc kubenswrapper[4791]: I1208 22:26:39.597600 4791 scope.go:117] "RemoveContainer" containerID="d9fa01ccb8c396c7c4d9b876214dcbc0da45e1478887955f6dafccca5f480141" Dec 08 22:26:39 crc kubenswrapper[4791]: E1208 22:26:39.598219 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:26:53 crc kubenswrapper[4791]: I1208 22:26:53.606509 4791 scope.go:117] "RemoveContainer" containerID="d9fa01ccb8c396c7c4d9b876214dcbc0da45e1478887955f6dafccca5f480141" Dec 08 22:26:53 crc kubenswrapper[4791]: E1208 22:26:53.608139 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:27:05 crc kubenswrapper[4791]: I1208 22:27:05.598053 4791 scope.go:117] "RemoveContainer" containerID="d9fa01ccb8c396c7c4d9b876214dcbc0da45e1478887955f6dafccca5f480141" Dec 08 22:27:05 crc kubenswrapper[4791]: E1208 22:27:05.598644 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:27:20 crc kubenswrapper[4791]: I1208 22:27:20.598727 4791 scope.go:117] "RemoveContainer" containerID="d9fa01ccb8c396c7c4d9b876214dcbc0da45e1478887955f6dafccca5f480141" Dec 08 22:27:20 crc kubenswrapper[4791]: E1208 22:27:20.599530 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager 
pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:27:31 crc kubenswrapper[4791]: I1208 22:27:31.598429 4791 scope.go:117] "RemoveContainer" containerID="d9fa01ccb8c396c7c4d9b876214dcbc0da45e1478887955f6dafccca5f480141" Dec 08 22:27:31 crc kubenswrapper[4791]: E1208 22:27:31.599250 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:27:35 crc kubenswrapper[4791]: I1208 22:27:35.251848 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 22:27:35 crc kubenswrapper[4791]: I1208 22:27:35.252375 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 22:27:42 crc kubenswrapper[4791]: I1208 22:27:42.598585 4791 scope.go:117] "RemoveContainer" containerID="d9fa01ccb8c396c7c4d9b876214dcbc0da45e1478887955f6dafccca5f480141" Dec 08 22:27:42 crc kubenswrapper[4791]: E1208 22:27:42.599343 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:27:55 crc kubenswrapper[4791]: I1208 22:27:55.598775 4791 scope.go:117] "RemoveContainer" containerID="d9fa01ccb8c396c7c4d9b876214dcbc0da45e1478887955f6dafccca5f480141" Dec 08 22:27:55 crc kubenswrapper[4791]: E1208 22:27:55.599730 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:28:05 crc kubenswrapper[4791]: I1208 22:28:05.251239 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 22:28:05 crc kubenswrapper[4791]: I1208 22:28:05.251850 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" 
podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 22:28:10 crc kubenswrapper[4791]: I1208 22:28:10.597722 4791 scope.go:117] "RemoveContainer" containerID="d9fa01ccb8c396c7c4d9b876214dcbc0da45e1478887955f6dafccca5f480141" Dec 08 22:28:10 crc kubenswrapper[4791]: E1208 22:28:10.598447 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:28:21 crc kubenswrapper[4791]: I1208 22:28:21.600049 4791 scope.go:117] "RemoveContainer" containerID="d9fa01ccb8c396c7c4d9b876214dcbc0da45e1478887955f6dafccca5f480141" Dec 08 22:28:21 crc kubenswrapper[4791]: E1208 22:28:21.601970 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:28:34 crc kubenswrapper[4791]: I1208 22:28:34.598532 4791 scope.go:117] "RemoveContainer" containerID="d9fa01ccb8c396c7c4d9b876214dcbc0da45e1478887955f6dafccca5f480141" Dec 08 22:28:34 crc kubenswrapper[4791]: E1208 22:28:34.599385 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:28:35 crc kubenswrapper[4791]: I1208 22:28:35.251206 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 22:28:35 crc kubenswrapper[4791]: I1208 22:28:35.251531 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 22:28:35 crc kubenswrapper[4791]: I1208 22:28:35.251574 4791 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" Dec 08 22:28:35 crc kubenswrapper[4791]: I1208 22:28:35.252467 4791 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"56ea5ba8f39b94d8e41ac61700e1825df023f29cb0f944779c6114033e9a7f0f"} pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" containerMessage="Container 
machine-config-daemon failed liveness probe, will be restarted" Dec 08 22:28:35 crc kubenswrapper[4791]: I1208 22:28:35.252524 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" containerID="cri-o://56ea5ba8f39b94d8e41ac61700e1825df023f29cb0f944779c6114033e9a7f0f" gracePeriod=600 Dec 08 22:28:36 crc kubenswrapper[4791]: I1208 22:28:36.147597 4791 generic.go:334] "Generic (PLEG): container finished" podID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerID="56ea5ba8f39b94d8e41ac61700e1825df023f29cb0f944779c6114033e9a7f0f" exitCode=0 Dec 08 22:28:36 crc kubenswrapper[4791]: I1208 22:28:36.147671 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" event={"ID":"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d","Type":"ContainerDied","Data":"56ea5ba8f39b94d8e41ac61700e1825df023f29cb0f944779c6114033e9a7f0f"} Dec 08 22:28:36 crc kubenswrapper[4791]: I1208 22:28:36.148916 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" event={"ID":"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d","Type":"ContainerStarted","Data":"6e1af713c778add8df914ee1eae12a3f8026f2e235ea69a585cb2b153e56dc73"} Dec 08 22:28:36 crc kubenswrapper[4791]: I1208 22:28:36.148951 4791 scope.go:117] "RemoveContainer" containerID="2e2170d7813848aafc86390ba56331b69fab25fa48d999b5edf9d163454cd903" Dec 08 22:28:46 crc kubenswrapper[4791]: I1208 22:28:46.598556 4791 scope.go:117] "RemoveContainer" containerID="d9fa01ccb8c396c7c4d9b876214dcbc0da45e1478887955f6dafccca5f480141" Dec 08 22:28:46 crc kubenswrapper[4791]: E1208 22:28:46.599379 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:29:01 crc kubenswrapper[4791]: I1208 22:29:01.598236 4791 scope.go:117] "RemoveContainer" containerID="d9fa01ccb8c396c7c4d9b876214dcbc0da45e1478887955f6dafccca5f480141" Dec 08 22:29:01 crc kubenswrapper[4791]: E1208 22:29:01.599034 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:29:16 crc kubenswrapper[4791]: I1208 22:29:16.597818 4791 scope.go:117] "RemoveContainer" containerID="d9fa01ccb8c396c7c4d9b876214dcbc0da45e1478887955f6dafccca5f480141" Dec 08 22:29:16 crc kubenswrapper[4791]: E1208 22:29:16.598613 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" 
podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:29:30 crc kubenswrapper[4791]: I1208 22:29:30.597676 4791 scope.go:117] "RemoveContainer" containerID="d9fa01ccb8c396c7c4d9b876214dcbc0da45e1478887955f6dafccca5f480141" Dec 08 22:29:30 crc kubenswrapper[4791]: E1208 22:29:30.598498 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:29:42 crc kubenswrapper[4791]: I1208 22:29:42.597891 4791 scope.go:117] "RemoveContainer" containerID="d9fa01ccb8c396c7c4d9b876214dcbc0da45e1478887955f6dafccca5f480141" Dec 08 22:29:42 crc kubenswrapper[4791]: E1208 22:29:42.598629 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:29:57 crc kubenswrapper[4791]: I1208 22:29:57.597743 4791 scope.go:117] "RemoveContainer" containerID="d9fa01ccb8c396c7c4d9b876214dcbc0da45e1478887955f6dafccca5f480141" Dec 08 22:29:57 crc kubenswrapper[4791]: E1208 22:29:57.598571 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:30:00 crc kubenswrapper[4791]: I1208 22:30:00.181741 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420550-sdrhj"] Dec 08 22:30:00 crc kubenswrapper[4791]: E1208 22:30:00.182645 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d48e79d4-f8b6-4076-87cb-45dd06c8c926" containerName="extract-content" Dec 08 22:30:00 crc kubenswrapper[4791]: I1208 22:30:00.182660 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="d48e79d4-f8b6-4076-87cb-45dd06c8c926" containerName="extract-content" Dec 08 22:30:00 crc kubenswrapper[4791]: E1208 22:30:00.182694 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d48e79d4-f8b6-4076-87cb-45dd06c8c926" containerName="registry-server" Dec 08 22:30:00 crc kubenswrapper[4791]: I1208 22:30:00.182700 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="d48e79d4-f8b6-4076-87cb-45dd06c8c926" containerName="registry-server" Dec 08 22:30:00 crc kubenswrapper[4791]: E1208 22:30:00.182731 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d48e79d4-f8b6-4076-87cb-45dd06c8c926" containerName="extract-utilities" Dec 08 22:30:00 crc kubenswrapper[4791]: I1208 22:30:00.182739 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="d48e79d4-f8b6-4076-87cb-45dd06c8c926" containerName="extract-utilities" Dec 08 22:30:00 crc kubenswrapper[4791]: I1208 22:30:00.182953 4791 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="d48e79d4-f8b6-4076-87cb-45dd06c8c926" containerName="registry-server" Dec 08 22:30:00 crc kubenswrapper[4791]: I1208 22:30:00.183979 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420550-sdrhj" Dec 08 22:30:00 crc kubenswrapper[4791]: I1208 22:30:00.186305 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 08 22:30:00 crc kubenswrapper[4791]: I1208 22:30:00.186613 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 08 22:30:00 crc kubenswrapper[4791]: I1208 22:30:00.193685 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420550-sdrhj"] Dec 08 22:30:00 crc kubenswrapper[4791]: I1208 22:30:00.325407 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fbfafba9-d562-4c55-8f33-5d93b0c5f651-config-volume\") pod \"collect-profiles-29420550-sdrhj\" (UID: \"fbfafba9-d562-4c55-8f33-5d93b0c5f651\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420550-sdrhj" Dec 08 22:30:00 crc kubenswrapper[4791]: I1208 22:30:00.325905 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xkns2\" (UniqueName: \"kubernetes.io/projected/fbfafba9-d562-4c55-8f33-5d93b0c5f651-kube-api-access-xkns2\") pod \"collect-profiles-29420550-sdrhj\" (UID: \"fbfafba9-d562-4c55-8f33-5d93b0c5f651\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420550-sdrhj" Dec 08 22:30:00 crc kubenswrapper[4791]: I1208 22:30:00.325980 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fbfafba9-d562-4c55-8f33-5d93b0c5f651-secret-volume\") pod \"collect-profiles-29420550-sdrhj\" (UID: \"fbfafba9-d562-4c55-8f33-5d93b0c5f651\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420550-sdrhj" Dec 08 22:30:00 crc kubenswrapper[4791]: I1208 22:30:00.428486 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fbfafba9-d562-4c55-8f33-5d93b0c5f651-secret-volume\") pod \"collect-profiles-29420550-sdrhj\" (UID: \"fbfafba9-d562-4c55-8f33-5d93b0c5f651\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420550-sdrhj" Dec 08 22:30:00 crc kubenswrapper[4791]: I1208 22:30:00.428742 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fbfafba9-d562-4c55-8f33-5d93b0c5f651-config-volume\") pod \"collect-profiles-29420550-sdrhj\" (UID: \"fbfafba9-d562-4c55-8f33-5d93b0c5f651\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420550-sdrhj" Dec 08 22:30:00 crc kubenswrapper[4791]: I1208 22:30:00.428826 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xkns2\" (UniqueName: \"kubernetes.io/projected/fbfafba9-d562-4c55-8f33-5d93b0c5f651-kube-api-access-xkns2\") pod \"collect-profiles-29420550-sdrhj\" (UID: \"fbfafba9-d562-4c55-8f33-5d93b0c5f651\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420550-sdrhj" Dec 08 22:30:00 crc kubenswrapper[4791]: I1208 
22:30:00.429883 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fbfafba9-d562-4c55-8f33-5d93b0c5f651-config-volume\") pod \"collect-profiles-29420550-sdrhj\" (UID: \"fbfafba9-d562-4c55-8f33-5d93b0c5f651\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420550-sdrhj" Dec 08 22:30:00 crc kubenswrapper[4791]: I1208 22:30:00.435183 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fbfafba9-d562-4c55-8f33-5d93b0c5f651-secret-volume\") pod \"collect-profiles-29420550-sdrhj\" (UID: \"fbfafba9-d562-4c55-8f33-5d93b0c5f651\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420550-sdrhj" Dec 08 22:30:00 crc kubenswrapper[4791]: I1208 22:30:00.444891 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xkns2\" (UniqueName: \"kubernetes.io/projected/fbfafba9-d562-4c55-8f33-5d93b0c5f651-kube-api-access-xkns2\") pod \"collect-profiles-29420550-sdrhj\" (UID: \"fbfafba9-d562-4c55-8f33-5d93b0c5f651\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420550-sdrhj" Dec 08 22:30:00 crc kubenswrapper[4791]: I1208 22:30:00.515974 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420550-sdrhj" Dec 08 22:30:01 crc kubenswrapper[4791]: I1208 22:30:01.000513 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420550-sdrhj"] Dec 08 22:30:01 crc kubenswrapper[4791]: I1208 22:30:01.997268 4791 generic.go:334] "Generic (PLEG): container finished" podID="fbfafba9-d562-4c55-8f33-5d93b0c5f651" containerID="92f940822e41fd253c6b95748f3ede55357d6c91a7a8a221f28045decfa8643f" exitCode=0 Dec 08 22:30:01 crc kubenswrapper[4791]: I1208 22:30:01.997755 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29420550-sdrhj" event={"ID":"fbfafba9-d562-4c55-8f33-5d93b0c5f651","Type":"ContainerDied","Data":"92f940822e41fd253c6b95748f3ede55357d6c91a7a8a221f28045decfa8643f"} Dec 08 22:30:01 crc kubenswrapper[4791]: I1208 22:30:01.997832 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29420550-sdrhj" event={"ID":"fbfafba9-d562-4c55-8f33-5d93b0c5f651","Type":"ContainerStarted","Data":"457e04c538e0e6a17000e3ea9d13dc14961cfc793ca6b8bfb0b6a76076692be8"} Dec 08 22:30:03 crc kubenswrapper[4791]: I1208 22:30:03.437924 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420550-sdrhj" Dec 08 22:30:03 crc kubenswrapper[4791]: I1208 22:30:03.604318 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fbfafba9-d562-4c55-8f33-5d93b0c5f651-secret-volume\") pod \"fbfafba9-d562-4c55-8f33-5d93b0c5f651\" (UID: \"fbfafba9-d562-4c55-8f33-5d93b0c5f651\") " Dec 08 22:30:03 crc kubenswrapper[4791]: I1208 22:30:03.604563 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fbfafba9-d562-4c55-8f33-5d93b0c5f651-config-volume\") pod \"fbfafba9-d562-4c55-8f33-5d93b0c5f651\" (UID: \"fbfafba9-d562-4c55-8f33-5d93b0c5f651\") " Dec 08 22:30:03 crc kubenswrapper[4791]: I1208 22:30:03.604605 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xkns2\" (UniqueName: \"kubernetes.io/projected/fbfafba9-d562-4c55-8f33-5d93b0c5f651-kube-api-access-xkns2\") pod \"fbfafba9-d562-4c55-8f33-5d93b0c5f651\" (UID: \"fbfafba9-d562-4c55-8f33-5d93b0c5f651\") " Dec 08 22:30:03 crc kubenswrapper[4791]: I1208 22:30:03.605352 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fbfafba9-d562-4c55-8f33-5d93b0c5f651-config-volume" (OuterVolumeSpecName: "config-volume") pod "fbfafba9-d562-4c55-8f33-5d93b0c5f651" (UID: "fbfafba9-d562-4c55-8f33-5d93b0c5f651"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 22:30:03 crc kubenswrapper[4791]: I1208 22:30:03.610549 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fbfafba9-d562-4c55-8f33-5d93b0c5f651-kube-api-access-xkns2" (OuterVolumeSpecName: "kube-api-access-xkns2") pod "fbfafba9-d562-4c55-8f33-5d93b0c5f651" (UID: "fbfafba9-d562-4c55-8f33-5d93b0c5f651"). InnerVolumeSpecName "kube-api-access-xkns2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 22:30:03 crc kubenswrapper[4791]: I1208 22:30:03.613426 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbfafba9-d562-4c55-8f33-5d93b0c5f651-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "fbfafba9-d562-4c55-8f33-5d93b0c5f651" (UID: "fbfafba9-d562-4c55-8f33-5d93b0c5f651"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 22:30:03 crc kubenswrapper[4791]: I1208 22:30:03.707378 4791 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fbfafba9-d562-4c55-8f33-5d93b0c5f651-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 08 22:30:03 crc kubenswrapper[4791]: I1208 22:30:03.707415 4791 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fbfafba9-d562-4c55-8f33-5d93b0c5f651-config-volume\") on node \"crc\" DevicePath \"\"" Dec 08 22:30:03 crc kubenswrapper[4791]: I1208 22:30:03.707433 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xkns2\" (UniqueName: \"kubernetes.io/projected/fbfafba9-d562-4c55-8f33-5d93b0c5f651-kube-api-access-xkns2\") on node \"crc\" DevicePath \"\"" Dec 08 22:30:04 crc kubenswrapper[4791]: I1208 22:30:04.017811 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29420550-sdrhj" event={"ID":"fbfafba9-d562-4c55-8f33-5d93b0c5f651","Type":"ContainerDied","Data":"457e04c538e0e6a17000e3ea9d13dc14961cfc793ca6b8bfb0b6a76076692be8"} Dec 08 22:30:04 crc kubenswrapper[4791]: I1208 22:30:04.018166 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="457e04c538e0e6a17000e3ea9d13dc14961cfc793ca6b8bfb0b6a76076692be8" Dec 08 22:30:04 crc kubenswrapper[4791]: I1208 22:30:04.017887 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420550-sdrhj" Dec 08 22:30:04 crc kubenswrapper[4791]: I1208 22:30:04.510960 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420505-ztjvz"] Dec 08 22:30:04 crc kubenswrapper[4791]: I1208 22:30:04.520913 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420505-ztjvz"] Dec 08 22:30:05 crc kubenswrapper[4791]: I1208 22:30:05.611451 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cbd46a46-01ac-4b1c-b0b8-8f5cda691535" path="/var/lib/kubelet/pods/cbd46a46-01ac-4b1c-b0b8-8f5cda691535/volumes" Dec 08 22:30:11 crc kubenswrapper[4791]: I1208 22:30:11.598338 4791 scope.go:117] "RemoveContainer" containerID="d9fa01ccb8c396c7c4d9b876214dcbc0da45e1478887955f6dafccca5f480141" Dec 08 22:30:11 crc kubenswrapper[4791]: E1208 22:30:11.599132 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:30:19 crc kubenswrapper[4791]: I1208 22:30:19.268086 4791 scope.go:117] "RemoveContainer" containerID="817238babfa892405415f9c41dc201222440e7b87a670dd3ea6cd71bdb92f767" Dec 08 22:30:24 crc kubenswrapper[4791]: I1208 22:30:24.599152 4791 scope.go:117] "RemoveContainer" containerID="d9fa01ccb8c396c7c4d9b876214dcbc0da45e1478887955f6dafccca5f480141" Dec 08 22:30:24 crc kubenswrapper[4791]: E1208 22:30:24.600104 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager 
pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:30:35 crc kubenswrapper[4791]: I1208 22:30:35.251150 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 22:30:35 crc kubenswrapper[4791]: I1208 22:30:35.251624 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 22:30:39 crc kubenswrapper[4791]: I1208 22:30:39.598869 4791 scope.go:117] "RemoveContainer" containerID="d9fa01ccb8c396c7c4d9b876214dcbc0da45e1478887955f6dafccca5f480141" Dec 08 22:30:39 crc kubenswrapper[4791]: E1208 22:30:39.599694 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:30:51 crc kubenswrapper[4791]: I1208 22:30:51.598223 4791 scope.go:117] "RemoveContainer" containerID="d9fa01ccb8c396c7c4d9b876214dcbc0da45e1478887955f6dafccca5f480141" Dec 08 22:30:51 crc kubenswrapper[4791]: E1208 22:30:51.599061 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:31:05 crc kubenswrapper[4791]: I1208 22:31:05.252357 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 22:31:05 crc kubenswrapper[4791]: I1208 22:31:05.252921 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 22:31:05 crc kubenswrapper[4791]: I1208 22:31:05.599390 4791 scope.go:117] "RemoveContainer" containerID="d9fa01ccb8c396c7c4d9b876214dcbc0da45e1478887955f6dafccca5f480141" Dec 08 22:31:05 crc kubenswrapper[4791]: E1208 22:31:05.599809 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager 
pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:31:19 crc kubenswrapper[4791]: I1208 22:31:19.603612 4791 scope.go:117] "RemoveContainer" containerID="d9fa01ccb8c396c7c4d9b876214dcbc0da45e1478887955f6dafccca5f480141" Dec 08 22:31:20 crc kubenswrapper[4791]: I1208 22:31:20.801330 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" event={"ID":"bcd8d669-4a40-401d-af99-651b840fb48b","Type":"ContainerStarted","Data":"c94dd952fbd13f9f47ab406886e78f89cdc92ef73e8da6c751e274a3aae30a9d"} Dec 08 22:31:20 crc kubenswrapper[4791]: I1208 22:31:20.802849 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 22:31:25 crc kubenswrapper[4791]: I1208 22:31:25.887604 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 22:31:35 crc kubenswrapper[4791]: I1208 22:31:35.251245 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 22:31:35 crc kubenswrapper[4791]: I1208 22:31:35.251863 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 22:31:35 crc kubenswrapper[4791]: I1208 22:31:35.251922 4791 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" Dec 08 22:31:35 crc kubenswrapper[4791]: I1208 22:31:35.252972 4791 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6e1af713c778add8df914ee1eae12a3f8026f2e235ea69a585cb2b153e56dc73"} pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 08 22:31:35 crc kubenswrapper[4791]: I1208 22:31:35.253054 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" containerID="cri-o://6e1af713c778add8df914ee1eae12a3f8026f2e235ea69a585cb2b153e56dc73" gracePeriod=600 Dec 08 22:31:35 crc kubenswrapper[4791]: E1208 22:31:35.425412 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:31:35 crc kubenswrapper[4791]: I1208 22:31:35.944506 4791 
generic.go:334] "Generic (PLEG): container finished" podID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerID="6e1af713c778add8df914ee1eae12a3f8026f2e235ea69a585cb2b153e56dc73" exitCode=0 Dec 08 22:31:35 crc kubenswrapper[4791]: I1208 22:31:35.944568 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" event={"ID":"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d","Type":"ContainerDied","Data":"6e1af713c778add8df914ee1eae12a3f8026f2e235ea69a585cb2b153e56dc73"} Dec 08 22:31:35 crc kubenswrapper[4791]: I1208 22:31:35.944607 4791 scope.go:117] "RemoveContainer" containerID="56ea5ba8f39b94d8e41ac61700e1825df023f29cb0f944779c6114033e9a7f0f" Dec 08 22:31:35 crc kubenswrapper[4791]: I1208 22:31:35.945700 4791 scope.go:117] "RemoveContainer" containerID="6e1af713c778add8df914ee1eae12a3f8026f2e235ea69a585cb2b153e56dc73" Dec 08 22:31:35 crc kubenswrapper[4791]: E1208 22:31:35.946236 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:31:50 crc kubenswrapper[4791]: I1208 22:31:50.598213 4791 scope.go:117] "RemoveContainer" containerID="6e1af713c778add8df914ee1eae12a3f8026f2e235ea69a585cb2b153e56dc73" Dec 08 22:31:50 crc kubenswrapper[4791]: E1208 22:31:50.599126 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:32:01 crc kubenswrapper[4791]: I1208 22:32:01.598686 4791 scope.go:117] "RemoveContainer" containerID="6e1af713c778add8df914ee1eae12a3f8026f2e235ea69a585cb2b153e56dc73" Dec 08 22:32:01 crc kubenswrapper[4791]: E1208 22:32:01.599677 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:32:15 crc kubenswrapper[4791]: I1208 22:32:15.599008 4791 scope.go:117] "RemoveContainer" containerID="6e1af713c778add8df914ee1eae12a3f8026f2e235ea69a585cb2b153e56dc73" Dec 08 22:32:15 crc kubenswrapper[4791]: E1208 22:32:15.600086 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:32:26 crc kubenswrapper[4791]: I1208 22:32:26.598254 4791 scope.go:117] "RemoveContainer" 
containerID="6e1af713c778add8df914ee1eae12a3f8026f2e235ea69a585cb2b153e56dc73" Dec 08 22:32:26 crc kubenswrapper[4791]: E1208 22:32:26.599171 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:32:39 crc kubenswrapper[4791]: I1208 22:32:39.598576 4791 scope.go:117] "RemoveContainer" containerID="6e1af713c778add8df914ee1eae12a3f8026f2e235ea69a585cb2b153e56dc73" Dec 08 22:32:39 crc kubenswrapper[4791]: E1208 22:32:39.599402 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:32:46 crc kubenswrapper[4791]: I1208 22:32:46.175517 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-zfwzc"] Dec 08 22:32:46 crc kubenswrapper[4791]: E1208 22:32:46.176550 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbfafba9-d562-4c55-8f33-5d93b0c5f651" containerName="collect-profiles" Dec 08 22:32:46 crc kubenswrapper[4791]: I1208 22:32:46.176565 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbfafba9-d562-4c55-8f33-5d93b0c5f651" containerName="collect-profiles" Dec 08 22:32:46 crc kubenswrapper[4791]: I1208 22:32:46.176834 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbfafba9-d562-4c55-8f33-5d93b0c5f651" containerName="collect-profiles" Dec 08 22:32:46 crc kubenswrapper[4791]: I1208 22:32:46.178583 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zfwzc" Dec 08 22:32:46 crc kubenswrapper[4791]: I1208 22:32:46.186601 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zfwzc"] Dec 08 22:32:46 crc kubenswrapper[4791]: I1208 22:32:46.293073 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44kzc\" (UniqueName: \"kubernetes.io/projected/0947197b-83f9-4442-a663-03554503d009-kube-api-access-44kzc\") pod \"certified-operators-zfwzc\" (UID: \"0947197b-83f9-4442-a663-03554503d009\") " pod="openshift-marketplace/certified-operators-zfwzc" Dec 08 22:32:46 crc kubenswrapper[4791]: I1208 22:32:46.293157 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0947197b-83f9-4442-a663-03554503d009-catalog-content\") pod \"certified-operators-zfwzc\" (UID: \"0947197b-83f9-4442-a663-03554503d009\") " pod="openshift-marketplace/certified-operators-zfwzc" Dec 08 22:32:46 crc kubenswrapper[4791]: I1208 22:32:46.293300 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0947197b-83f9-4442-a663-03554503d009-utilities\") pod \"certified-operators-zfwzc\" (UID: \"0947197b-83f9-4442-a663-03554503d009\") " pod="openshift-marketplace/certified-operators-zfwzc" Dec 08 22:32:46 crc kubenswrapper[4791]: I1208 22:32:46.396295 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44kzc\" (UniqueName: \"kubernetes.io/projected/0947197b-83f9-4442-a663-03554503d009-kube-api-access-44kzc\") pod \"certified-operators-zfwzc\" (UID: \"0947197b-83f9-4442-a663-03554503d009\") " pod="openshift-marketplace/certified-operators-zfwzc" Dec 08 22:32:46 crc kubenswrapper[4791]: I1208 22:32:46.396379 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0947197b-83f9-4442-a663-03554503d009-catalog-content\") pod \"certified-operators-zfwzc\" (UID: \"0947197b-83f9-4442-a663-03554503d009\") " pod="openshift-marketplace/certified-operators-zfwzc" Dec 08 22:32:46 crc kubenswrapper[4791]: I1208 22:32:46.396421 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0947197b-83f9-4442-a663-03554503d009-utilities\") pod \"certified-operators-zfwzc\" (UID: \"0947197b-83f9-4442-a663-03554503d009\") " pod="openshift-marketplace/certified-operators-zfwzc" Dec 08 22:32:46 crc kubenswrapper[4791]: I1208 22:32:46.397047 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0947197b-83f9-4442-a663-03554503d009-catalog-content\") pod \"certified-operators-zfwzc\" (UID: \"0947197b-83f9-4442-a663-03554503d009\") " pod="openshift-marketplace/certified-operators-zfwzc" Dec 08 22:32:46 crc kubenswrapper[4791]: I1208 22:32:46.397112 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0947197b-83f9-4442-a663-03554503d009-utilities\") pod \"certified-operators-zfwzc\" (UID: \"0947197b-83f9-4442-a663-03554503d009\") " pod="openshift-marketplace/certified-operators-zfwzc" Dec 08 22:32:46 crc kubenswrapper[4791]: I1208 22:32:46.429940 4791 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-44kzc\" (UniqueName: \"kubernetes.io/projected/0947197b-83f9-4442-a663-03554503d009-kube-api-access-44kzc\") pod \"certified-operators-zfwzc\" (UID: \"0947197b-83f9-4442-a663-03554503d009\") " pod="openshift-marketplace/certified-operators-zfwzc" Dec 08 22:32:46 crc kubenswrapper[4791]: I1208 22:32:46.512028 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zfwzc" Dec 08 22:32:47 crc kubenswrapper[4791]: I1208 22:32:47.027840 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zfwzc"] Dec 08 22:32:47 crc kubenswrapper[4791]: I1208 22:32:47.842986 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zfwzc" event={"ID":"0947197b-83f9-4442-a663-03554503d009","Type":"ContainerStarted","Data":"e89f2dcb19ea6816a76f9928032bf51c8cd1b8de26b43f96694932687d293480"} Dec 08 22:32:48 crc kubenswrapper[4791]: I1208 22:32:48.855012 4791 generic.go:334] "Generic (PLEG): container finished" podID="0947197b-83f9-4442-a663-03554503d009" containerID="40364d55f9263884f5fcf92b8e9e729351346c5208b2d70abfa579794586f322" exitCode=0 Dec 08 22:32:48 crc kubenswrapper[4791]: I1208 22:32:48.855064 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zfwzc" event={"ID":"0947197b-83f9-4442-a663-03554503d009","Type":"ContainerDied","Data":"40364d55f9263884f5fcf92b8e9e729351346c5208b2d70abfa579794586f322"} Dec 08 22:32:48 crc kubenswrapper[4791]: I1208 22:32:48.858074 4791 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 08 22:32:50 crc kubenswrapper[4791]: I1208 22:32:50.878067 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zfwzc" event={"ID":"0947197b-83f9-4442-a663-03554503d009","Type":"ContainerStarted","Data":"42a2ac68dd1e1fc485c54a924c3b09414d0c4be74fa684fc0847947b4412247c"} Dec 08 22:32:51 crc kubenswrapper[4791]: I1208 22:32:51.601847 4791 scope.go:117] "RemoveContainer" containerID="6e1af713c778add8df914ee1eae12a3f8026f2e235ea69a585cb2b153e56dc73" Dec 08 22:32:51 crc kubenswrapper[4791]: E1208 22:32:51.602424 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:32:51 crc kubenswrapper[4791]: I1208 22:32:51.890420 4791 generic.go:334] "Generic (PLEG): container finished" podID="0947197b-83f9-4442-a663-03554503d009" containerID="42a2ac68dd1e1fc485c54a924c3b09414d0c4be74fa684fc0847947b4412247c" exitCode=0 Dec 08 22:32:51 crc kubenswrapper[4791]: I1208 22:32:51.890469 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zfwzc" event={"ID":"0947197b-83f9-4442-a663-03554503d009","Type":"ContainerDied","Data":"42a2ac68dd1e1fc485c54a924c3b09414d0c4be74fa684fc0847947b4412247c"} Dec 08 22:32:52 crc kubenswrapper[4791]: I1208 22:32:52.906482 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zfwzc" 
event={"ID":"0947197b-83f9-4442-a663-03554503d009","Type":"ContainerStarted","Data":"3884ed295c1bdd4546c58632e7fcec5dbbd3097c2f282518fdba868ad56cb144"} Dec 08 22:32:52 crc kubenswrapper[4791]: I1208 22:32:52.927500 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-zfwzc" podStartSLOduration=3.477681438 podStartE2EDuration="6.927478723s" podCreationTimestamp="2025-12-08 22:32:46 +0000 UTC" firstStartedPulling="2025-12-08 22:32:48.857819285 +0000 UTC m=+4445.556577630" lastFinishedPulling="2025-12-08 22:32:52.30761657 +0000 UTC m=+4449.006374915" observedRunningTime="2025-12-08 22:32:52.92377951 +0000 UTC m=+4449.622537875" watchObservedRunningTime="2025-12-08 22:32:52.927478723 +0000 UTC m=+4449.626237078" Dec 08 22:32:56 crc kubenswrapper[4791]: I1208 22:32:56.513040 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-zfwzc" Dec 08 22:32:56 crc kubenswrapper[4791]: I1208 22:32:56.513542 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-zfwzc" Dec 08 22:32:56 crc kubenswrapper[4791]: I1208 22:32:56.563238 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-zfwzc" Dec 08 22:33:06 crc kubenswrapper[4791]: I1208 22:33:06.565777 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-zfwzc" Dec 08 22:33:06 crc kubenswrapper[4791]: I1208 22:33:06.598081 4791 scope.go:117] "RemoveContainer" containerID="6e1af713c778add8df914ee1eae12a3f8026f2e235ea69a585cb2b153e56dc73" Dec 08 22:33:06 crc kubenswrapper[4791]: E1208 22:33:06.598422 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:33:06 crc kubenswrapper[4791]: I1208 22:33:06.626494 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zfwzc"] Dec 08 22:33:07 crc kubenswrapper[4791]: I1208 22:33:07.094648 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-zfwzc" podUID="0947197b-83f9-4442-a663-03554503d009" containerName="registry-server" containerID="cri-o://3884ed295c1bdd4546c58632e7fcec5dbbd3097c2f282518fdba868ad56cb144" gracePeriod=2 Dec 08 22:33:07 crc kubenswrapper[4791]: I1208 22:33:07.655527 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zfwzc" Dec 08 22:33:07 crc kubenswrapper[4791]: I1208 22:33:07.826395 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-44kzc\" (UniqueName: \"kubernetes.io/projected/0947197b-83f9-4442-a663-03554503d009-kube-api-access-44kzc\") pod \"0947197b-83f9-4442-a663-03554503d009\" (UID: \"0947197b-83f9-4442-a663-03554503d009\") " Dec 08 22:33:07 crc kubenswrapper[4791]: I1208 22:33:07.826541 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0947197b-83f9-4442-a663-03554503d009-catalog-content\") pod \"0947197b-83f9-4442-a663-03554503d009\" (UID: \"0947197b-83f9-4442-a663-03554503d009\") " Dec 08 22:33:07 crc kubenswrapper[4791]: I1208 22:33:07.826576 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0947197b-83f9-4442-a663-03554503d009-utilities\") pod \"0947197b-83f9-4442-a663-03554503d009\" (UID: \"0947197b-83f9-4442-a663-03554503d009\") " Dec 08 22:33:07 crc kubenswrapper[4791]: I1208 22:33:07.827501 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0947197b-83f9-4442-a663-03554503d009-utilities" (OuterVolumeSpecName: "utilities") pod "0947197b-83f9-4442-a663-03554503d009" (UID: "0947197b-83f9-4442-a663-03554503d009"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:33:07 crc kubenswrapper[4791]: I1208 22:33:07.833762 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0947197b-83f9-4442-a663-03554503d009-kube-api-access-44kzc" (OuterVolumeSpecName: "kube-api-access-44kzc") pod "0947197b-83f9-4442-a663-03554503d009" (UID: "0947197b-83f9-4442-a663-03554503d009"). InnerVolumeSpecName "kube-api-access-44kzc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 22:33:07 crc kubenswrapper[4791]: I1208 22:33:07.880240 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0947197b-83f9-4442-a663-03554503d009-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0947197b-83f9-4442-a663-03554503d009" (UID: "0947197b-83f9-4442-a663-03554503d009"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:33:07 crc kubenswrapper[4791]: I1208 22:33:07.931503 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-44kzc\" (UniqueName: \"kubernetes.io/projected/0947197b-83f9-4442-a663-03554503d009-kube-api-access-44kzc\") on node \"crc\" DevicePath \"\"" Dec 08 22:33:07 crc kubenswrapper[4791]: I1208 22:33:07.931555 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0947197b-83f9-4442-a663-03554503d009-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 22:33:07 crc kubenswrapper[4791]: I1208 22:33:07.931567 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0947197b-83f9-4442-a663-03554503d009-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 22:33:08 crc kubenswrapper[4791]: I1208 22:33:08.106472 4791 generic.go:334] "Generic (PLEG): container finished" podID="0947197b-83f9-4442-a663-03554503d009" containerID="3884ed295c1bdd4546c58632e7fcec5dbbd3097c2f282518fdba868ad56cb144" exitCode=0 Dec 08 22:33:08 crc kubenswrapper[4791]: I1208 22:33:08.106519 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zfwzc" event={"ID":"0947197b-83f9-4442-a663-03554503d009","Type":"ContainerDied","Data":"3884ed295c1bdd4546c58632e7fcec5dbbd3097c2f282518fdba868ad56cb144"} Dec 08 22:33:08 crc kubenswrapper[4791]: I1208 22:33:08.106557 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zfwzc" event={"ID":"0947197b-83f9-4442-a663-03554503d009","Type":"ContainerDied","Data":"e89f2dcb19ea6816a76f9928032bf51c8cd1b8de26b43f96694932687d293480"} Dec 08 22:33:08 crc kubenswrapper[4791]: I1208 22:33:08.106572 4791 scope.go:117] "RemoveContainer" containerID="3884ed295c1bdd4546c58632e7fcec5dbbd3097c2f282518fdba868ad56cb144" Dec 08 22:33:08 crc kubenswrapper[4791]: I1208 22:33:08.106565 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zfwzc" Dec 08 22:33:08 crc kubenswrapper[4791]: I1208 22:33:08.137882 4791 scope.go:117] "RemoveContainer" containerID="42a2ac68dd1e1fc485c54a924c3b09414d0c4be74fa684fc0847947b4412247c" Dec 08 22:33:08 crc kubenswrapper[4791]: I1208 22:33:08.145187 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zfwzc"] Dec 08 22:33:08 crc kubenswrapper[4791]: I1208 22:33:08.155124 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-zfwzc"] Dec 08 22:33:08 crc kubenswrapper[4791]: I1208 22:33:08.159704 4791 scope.go:117] "RemoveContainer" containerID="40364d55f9263884f5fcf92b8e9e729351346c5208b2d70abfa579794586f322" Dec 08 22:33:08 crc kubenswrapper[4791]: I1208 22:33:08.211367 4791 scope.go:117] "RemoveContainer" containerID="3884ed295c1bdd4546c58632e7fcec5dbbd3097c2f282518fdba868ad56cb144" Dec 08 22:33:08 crc kubenswrapper[4791]: E1208 22:33:08.213177 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3884ed295c1bdd4546c58632e7fcec5dbbd3097c2f282518fdba868ad56cb144\": container with ID starting with 3884ed295c1bdd4546c58632e7fcec5dbbd3097c2f282518fdba868ad56cb144 not found: ID does not exist" containerID="3884ed295c1bdd4546c58632e7fcec5dbbd3097c2f282518fdba868ad56cb144" Dec 08 22:33:08 crc kubenswrapper[4791]: I1208 22:33:08.213236 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3884ed295c1bdd4546c58632e7fcec5dbbd3097c2f282518fdba868ad56cb144"} err="failed to get container status \"3884ed295c1bdd4546c58632e7fcec5dbbd3097c2f282518fdba868ad56cb144\": rpc error: code = NotFound desc = could not find container \"3884ed295c1bdd4546c58632e7fcec5dbbd3097c2f282518fdba868ad56cb144\": container with ID starting with 3884ed295c1bdd4546c58632e7fcec5dbbd3097c2f282518fdba868ad56cb144 not found: ID does not exist" Dec 08 22:33:08 crc kubenswrapper[4791]: I1208 22:33:08.213270 4791 scope.go:117] "RemoveContainer" containerID="42a2ac68dd1e1fc485c54a924c3b09414d0c4be74fa684fc0847947b4412247c" Dec 08 22:33:08 crc kubenswrapper[4791]: E1208 22:33:08.213772 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"42a2ac68dd1e1fc485c54a924c3b09414d0c4be74fa684fc0847947b4412247c\": container with ID starting with 42a2ac68dd1e1fc485c54a924c3b09414d0c4be74fa684fc0847947b4412247c not found: ID does not exist" containerID="42a2ac68dd1e1fc485c54a924c3b09414d0c4be74fa684fc0847947b4412247c" Dec 08 22:33:08 crc kubenswrapper[4791]: I1208 22:33:08.213806 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42a2ac68dd1e1fc485c54a924c3b09414d0c4be74fa684fc0847947b4412247c"} err="failed to get container status \"42a2ac68dd1e1fc485c54a924c3b09414d0c4be74fa684fc0847947b4412247c\": rpc error: code = NotFound desc = could not find container \"42a2ac68dd1e1fc485c54a924c3b09414d0c4be74fa684fc0847947b4412247c\": container with ID starting with 42a2ac68dd1e1fc485c54a924c3b09414d0c4be74fa684fc0847947b4412247c not found: ID does not exist" Dec 08 22:33:08 crc kubenswrapper[4791]: I1208 22:33:08.213830 4791 scope.go:117] "RemoveContainer" containerID="40364d55f9263884f5fcf92b8e9e729351346c5208b2d70abfa579794586f322" Dec 08 22:33:08 crc kubenswrapper[4791]: E1208 22:33:08.214227 4791 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"40364d55f9263884f5fcf92b8e9e729351346c5208b2d70abfa579794586f322\": container with ID starting with 40364d55f9263884f5fcf92b8e9e729351346c5208b2d70abfa579794586f322 not found: ID does not exist" containerID="40364d55f9263884f5fcf92b8e9e729351346c5208b2d70abfa579794586f322" Dec 08 22:33:08 crc kubenswrapper[4791]: I1208 22:33:08.214258 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"40364d55f9263884f5fcf92b8e9e729351346c5208b2d70abfa579794586f322"} err="failed to get container status \"40364d55f9263884f5fcf92b8e9e729351346c5208b2d70abfa579794586f322\": rpc error: code = NotFound desc = could not find container \"40364d55f9263884f5fcf92b8e9e729351346c5208b2d70abfa579794586f322\": container with ID starting with 40364d55f9263884f5fcf92b8e9e729351346c5208b2d70abfa579794586f322 not found: ID does not exist" Dec 08 22:33:09 crc kubenswrapper[4791]: I1208 22:33:09.613126 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0947197b-83f9-4442-a663-03554503d009" path="/var/lib/kubelet/pods/0947197b-83f9-4442-a663-03554503d009/volumes" Dec 08 22:33:21 crc kubenswrapper[4791]: I1208 22:33:21.597884 4791 scope.go:117] "RemoveContainer" containerID="6e1af713c778add8df914ee1eae12a3f8026f2e235ea69a585cb2b153e56dc73" Dec 08 22:33:21 crc kubenswrapper[4791]: E1208 22:33:21.598529 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:33:36 crc kubenswrapper[4791]: I1208 22:33:36.599281 4791 scope.go:117] "RemoveContainer" containerID="6e1af713c778add8df914ee1eae12a3f8026f2e235ea69a585cb2b153e56dc73" Dec 08 22:33:36 crc kubenswrapper[4791]: E1208 22:33:36.599959 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:33:49 crc kubenswrapper[4791]: I1208 22:33:49.599017 4791 scope.go:117] "RemoveContainer" containerID="6e1af713c778add8df914ee1eae12a3f8026f2e235ea69a585cb2b153e56dc73" Dec 08 22:33:49 crc kubenswrapper[4791]: E1208 22:33:49.599856 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:33:52 crc kubenswrapper[4791]: I1208 22:33:52.559154 4791 generic.go:334] "Generic (PLEG): container finished" podID="bcd8d669-4a40-401d-af99-651b840fb48b" containerID="c94dd952fbd13f9f47ab406886e78f89cdc92ef73e8da6c751e274a3aae30a9d" exitCode=1 Dec 08 22:33:52 crc kubenswrapper[4791]: I1208 22:33:52.559222 4791 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" event={"ID":"bcd8d669-4a40-401d-af99-651b840fb48b","Type":"ContainerDied","Data":"c94dd952fbd13f9f47ab406886e78f89cdc92ef73e8da6c751e274a3aae30a9d"} Dec 08 22:33:52 crc kubenswrapper[4791]: I1208 22:33:52.559692 4791 scope.go:117] "RemoveContainer" containerID="d9fa01ccb8c396c7c4d9b876214dcbc0da45e1478887955f6dafccca5f480141" Dec 08 22:33:52 crc kubenswrapper[4791]: I1208 22:33:52.560545 4791 scope.go:117] "RemoveContainer" containerID="c94dd952fbd13f9f47ab406886e78f89cdc92ef73e8da6c751e274a3aae30a9d" Dec 08 22:33:52 crc kubenswrapper[4791]: E1208 22:33:52.560871 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:33:55 crc kubenswrapper[4791]: I1208 22:33:55.884303 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 22:33:55 crc kubenswrapper[4791]: I1208 22:33:55.886013 4791 scope.go:117] "RemoveContainer" containerID="c94dd952fbd13f9f47ab406886e78f89cdc92ef73e8da6c751e274a3aae30a9d" Dec 08 22:33:55 crc kubenswrapper[4791]: E1208 22:33:55.886373 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:34:03 crc kubenswrapper[4791]: I1208 22:34:03.608424 4791 scope.go:117] "RemoveContainer" containerID="6e1af713c778add8df914ee1eae12a3f8026f2e235ea69a585cb2b153e56dc73" Dec 08 22:34:03 crc kubenswrapper[4791]: E1208 22:34:03.609638 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:34:05 crc kubenswrapper[4791]: I1208 22:34:05.884566 4791 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 22:34:05 crc kubenswrapper[4791]: I1208 22:34:05.885962 4791 scope.go:117] "RemoveContainer" containerID="c94dd952fbd13f9f47ab406886e78f89cdc92ef73e8da6c751e274a3aae30a9d" Dec 08 22:34:05 crc kubenswrapper[4791]: E1208 22:34:05.886280 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" 
podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:34:16 crc kubenswrapper[4791]: I1208 22:34:16.598892 4791 scope.go:117] "RemoveContainer" containerID="6e1af713c778add8df914ee1eae12a3f8026f2e235ea69a585cb2b153e56dc73" Dec 08 22:34:16 crc kubenswrapper[4791]: E1208 22:34:16.599774 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:34:20 crc kubenswrapper[4791]: I1208 22:34:20.597669 4791 scope.go:117] "RemoveContainer" containerID="c94dd952fbd13f9f47ab406886e78f89cdc92ef73e8da6c751e274a3aae30a9d" Dec 08 22:34:20 crc kubenswrapper[4791]: E1208 22:34:20.598432 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:34:27 crc kubenswrapper[4791]: I1208 22:34:27.599176 4791 scope.go:117] "RemoveContainer" containerID="6e1af713c778add8df914ee1eae12a3f8026f2e235ea69a585cb2b153e56dc73" Dec 08 22:34:27 crc kubenswrapper[4791]: E1208 22:34:27.599880 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:34:31 crc kubenswrapper[4791]: I1208 22:34:31.602614 4791 scope.go:117] "RemoveContainer" containerID="c94dd952fbd13f9f47ab406886e78f89cdc92ef73e8da6c751e274a3aae30a9d" Dec 08 22:34:31 crc kubenswrapper[4791]: E1208 22:34:31.605081 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:34:40 crc kubenswrapper[4791]: I1208 22:34:40.598419 4791 scope.go:117] "RemoveContainer" containerID="6e1af713c778add8df914ee1eae12a3f8026f2e235ea69a585cb2b153e56dc73" Dec 08 22:34:40 crc kubenswrapper[4791]: E1208 22:34:40.599207 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:34:43 crc kubenswrapper[4791]: I1208 22:34:43.631112 4791 scope.go:117] "RemoveContainer" 
containerID="c94dd952fbd13f9f47ab406886e78f89cdc92ef73e8da6c751e274a3aae30a9d" Dec 08 22:34:43 crc kubenswrapper[4791]: E1208 22:34:43.632408 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:34:51 crc kubenswrapper[4791]: I1208 22:34:51.602000 4791 scope.go:117] "RemoveContainer" containerID="6e1af713c778add8df914ee1eae12a3f8026f2e235ea69a585cb2b153e56dc73" Dec 08 22:34:51 crc kubenswrapper[4791]: E1208 22:34:51.602962 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:34:54 crc kubenswrapper[4791]: I1208 22:34:54.598016 4791 scope.go:117] "RemoveContainer" containerID="c94dd952fbd13f9f47ab406886e78f89cdc92ef73e8da6c751e274a3aae30a9d" Dec 08 22:34:54 crc kubenswrapper[4791]: E1208 22:34:54.598469 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:35:05 crc kubenswrapper[4791]: I1208 22:35:05.598528 4791 scope.go:117] "RemoveContainer" containerID="6e1af713c778add8df914ee1eae12a3f8026f2e235ea69a585cb2b153e56dc73" Dec 08 22:35:05 crc kubenswrapper[4791]: E1208 22:35:05.599283 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:35:07 crc kubenswrapper[4791]: I1208 22:35:07.599245 4791 scope.go:117] "RemoveContainer" containerID="c94dd952fbd13f9f47ab406886e78f89cdc92ef73e8da6c751e274a3aae30a9d" Dec 08 22:35:07 crc kubenswrapper[4791]: E1208 22:35:07.599880 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:35:18 crc kubenswrapper[4791]: I1208 22:35:18.599236 4791 scope.go:117] "RemoveContainer" containerID="6e1af713c778add8df914ee1eae12a3f8026f2e235ea69a585cb2b153e56dc73" Dec 08 22:35:18 crc kubenswrapper[4791]: E1208 22:35:18.600052 4791 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:35:22 crc kubenswrapper[4791]: I1208 22:35:22.599396 4791 scope.go:117] "RemoveContainer" containerID="c94dd952fbd13f9f47ab406886e78f89cdc92ef73e8da6c751e274a3aae30a9d" Dec 08 22:35:22 crc kubenswrapper[4791]: E1208 22:35:22.600213 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:35:33 crc kubenswrapper[4791]: I1208 22:35:33.606331 4791 scope.go:117] "RemoveContainer" containerID="6e1af713c778add8df914ee1eae12a3f8026f2e235ea69a585cb2b153e56dc73" Dec 08 22:35:33 crc kubenswrapper[4791]: E1208 22:35:33.607202 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:35:36 crc kubenswrapper[4791]: I1208 22:35:36.598196 4791 scope.go:117] "RemoveContainer" containerID="c94dd952fbd13f9f47ab406886e78f89cdc92ef73e8da6c751e274a3aae30a9d" Dec 08 22:35:36 crc kubenswrapper[4791]: E1208 22:35:36.599115 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:35:45 crc kubenswrapper[4791]: I1208 22:35:45.599063 4791 scope.go:117] "RemoveContainer" containerID="6e1af713c778add8df914ee1eae12a3f8026f2e235ea69a585cb2b153e56dc73" Dec 08 22:35:45 crc kubenswrapper[4791]: E1208 22:35:45.599988 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:35:47 crc kubenswrapper[4791]: I1208 22:35:47.602607 4791 scope.go:117] "RemoveContainer" containerID="c94dd952fbd13f9f47ab406886e78f89cdc92ef73e8da6c751e274a3aae30a9d" Dec 08 22:35:47 crc kubenswrapper[4791]: E1208 22:35:47.608329 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager 
pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:35:56 crc kubenswrapper[4791]: I1208 22:35:56.598145 4791 scope.go:117] "RemoveContainer" containerID="6e1af713c778add8df914ee1eae12a3f8026f2e235ea69a585cb2b153e56dc73" Dec 08 22:35:56 crc kubenswrapper[4791]: E1208 22:35:56.599257 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:36:02 crc kubenswrapper[4791]: I1208 22:36:02.598303 4791 scope.go:117] "RemoveContainer" containerID="c94dd952fbd13f9f47ab406886e78f89cdc92ef73e8da6c751e274a3aae30a9d" Dec 08 22:36:02 crc kubenswrapper[4791]: E1208 22:36:02.600177 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:36:11 crc kubenswrapper[4791]: I1208 22:36:11.597798 4791 scope.go:117] "RemoveContainer" containerID="6e1af713c778add8df914ee1eae12a3f8026f2e235ea69a585cb2b153e56dc73" Dec 08 22:36:11 crc kubenswrapper[4791]: E1208 22:36:11.598487 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:36:16 crc kubenswrapper[4791]: I1208 22:36:16.598593 4791 scope.go:117] "RemoveContainer" containerID="c94dd952fbd13f9f47ab406886e78f89cdc92ef73e8da6c751e274a3aae30a9d" Dec 08 22:36:16 crc kubenswrapper[4791]: E1208 22:36:16.599565 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:36:23 crc kubenswrapper[4791]: I1208 22:36:23.606764 4791 scope.go:117] "RemoveContainer" containerID="6e1af713c778add8df914ee1eae12a3f8026f2e235ea69a585cb2b153e56dc73" Dec 08 22:36:23 crc kubenswrapper[4791]: E1208 22:36:23.609529 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:36:27 crc kubenswrapper[4791]: I1208 22:36:27.598212 4791 scope.go:117] "RemoveContainer" containerID="c94dd952fbd13f9f47ab406886e78f89cdc92ef73e8da6c751e274a3aae30a9d" Dec 08 22:36:27 crc kubenswrapper[4791]: E1208 22:36:27.598842 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:36:35 crc kubenswrapper[4791]: I1208 22:36:35.598829 4791 scope.go:117] "RemoveContainer" containerID="6e1af713c778add8df914ee1eae12a3f8026f2e235ea69a585cb2b153e56dc73" Dec 08 22:36:36 crc kubenswrapper[4791]: I1208 22:36:36.232087 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" event={"ID":"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d","Type":"ContainerStarted","Data":"3014bf225e3c79e5f5e2055994e09b528c4a4f223d744e19e904fa5ce5728012"} Dec 08 22:36:40 crc kubenswrapper[4791]: I1208 22:36:40.599624 4791 scope.go:117] "RemoveContainer" containerID="c94dd952fbd13f9f47ab406886e78f89cdc92ef73e8da6c751e274a3aae30a9d" Dec 08 22:36:40 crc kubenswrapper[4791]: E1208 22:36:40.601531 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:36:55 crc kubenswrapper[4791]: I1208 22:36:55.598539 4791 scope.go:117] "RemoveContainer" containerID="c94dd952fbd13f9f47ab406886e78f89cdc92ef73e8da6c751e274a3aae30a9d" Dec 08 22:36:55 crc kubenswrapper[4791]: E1208 22:36:55.599728 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:37:10 crc kubenswrapper[4791]: I1208 22:37:10.598316 4791 scope.go:117] "RemoveContainer" containerID="c94dd952fbd13f9f47ab406886e78f89cdc92ef73e8da6c751e274a3aae30a9d" Dec 08 22:37:10 crc kubenswrapper[4791]: E1208 22:37:10.600106 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:37:21 crc kubenswrapper[4791]: I1208 22:37:21.599668 4791 scope.go:117] "RemoveContainer" containerID="c94dd952fbd13f9f47ab406886e78f89cdc92ef73e8da6c751e274a3aae30a9d" Dec 08 22:37:21 crc kubenswrapper[4791]: E1208 22:37:21.600490 
4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:37:28 crc kubenswrapper[4791]: I1208 22:37:28.076635 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-b6m5q"] Dec 08 22:37:28 crc kubenswrapper[4791]: E1208 22:37:28.077820 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0947197b-83f9-4442-a663-03554503d009" containerName="extract-utilities" Dec 08 22:37:28 crc kubenswrapper[4791]: I1208 22:37:28.077837 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="0947197b-83f9-4442-a663-03554503d009" containerName="extract-utilities" Dec 08 22:37:28 crc kubenswrapper[4791]: E1208 22:37:28.077848 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0947197b-83f9-4442-a663-03554503d009" containerName="registry-server" Dec 08 22:37:28 crc kubenswrapper[4791]: I1208 22:37:28.077855 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="0947197b-83f9-4442-a663-03554503d009" containerName="registry-server" Dec 08 22:37:28 crc kubenswrapper[4791]: E1208 22:37:28.077913 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0947197b-83f9-4442-a663-03554503d009" containerName="extract-content" Dec 08 22:37:28 crc kubenswrapper[4791]: I1208 22:37:28.077921 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="0947197b-83f9-4442-a663-03554503d009" containerName="extract-content" Dec 08 22:37:28 crc kubenswrapper[4791]: I1208 22:37:28.078187 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="0947197b-83f9-4442-a663-03554503d009" containerName="registry-server" Dec 08 22:37:28 crc kubenswrapper[4791]: I1208 22:37:28.080419 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-b6m5q" Dec 08 22:37:28 crc kubenswrapper[4791]: I1208 22:37:28.092481 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-b6m5q"] Dec 08 22:37:28 crc kubenswrapper[4791]: I1208 22:37:28.240738 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qsp2k\" (UniqueName: \"kubernetes.io/projected/2b610af2-5c05-477d-a25b-5e8429c6a38c-kube-api-access-qsp2k\") pod \"community-operators-b6m5q\" (UID: \"2b610af2-5c05-477d-a25b-5e8429c6a38c\") " pod="openshift-marketplace/community-operators-b6m5q" Dec 08 22:37:28 crc kubenswrapper[4791]: I1208 22:37:28.240799 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b610af2-5c05-477d-a25b-5e8429c6a38c-catalog-content\") pod \"community-operators-b6m5q\" (UID: \"2b610af2-5c05-477d-a25b-5e8429c6a38c\") " pod="openshift-marketplace/community-operators-b6m5q" Dec 08 22:37:28 crc kubenswrapper[4791]: I1208 22:37:28.240844 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b610af2-5c05-477d-a25b-5e8429c6a38c-utilities\") pod \"community-operators-b6m5q\" (UID: \"2b610af2-5c05-477d-a25b-5e8429c6a38c\") " pod="openshift-marketplace/community-operators-b6m5q" Dec 08 22:37:28 crc kubenswrapper[4791]: I1208 22:37:28.343962 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qsp2k\" (UniqueName: \"kubernetes.io/projected/2b610af2-5c05-477d-a25b-5e8429c6a38c-kube-api-access-qsp2k\") pod \"community-operators-b6m5q\" (UID: \"2b610af2-5c05-477d-a25b-5e8429c6a38c\") " pod="openshift-marketplace/community-operators-b6m5q" Dec 08 22:37:28 crc kubenswrapper[4791]: I1208 22:37:28.344032 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b610af2-5c05-477d-a25b-5e8429c6a38c-catalog-content\") pod \"community-operators-b6m5q\" (UID: \"2b610af2-5c05-477d-a25b-5e8429c6a38c\") " pod="openshift-marketplace/community-operators-b6m5q" Dec 08 22:37:28 crc kubenswrapper[4791]: I1208 22:37:28.344112 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b610af2-5c05-477d-a25b-5e8429c6a38c-utilities\") pod \"community-operators-b6m5q\" (UID: \"2b610af2-5c05-477d-a25b-5e8429c6a38c\") " pod="openshift-marketplace/community-operators-b6m5q" Dec 08 22:37:28 crc kubenswrapper[4791]: I1208 22:37:28.344590 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b610af2-5c05-477d-a25b-5e8429c6a38c-catalog-content\") pod \"community-operators-b6m5q\" (UID: \"2b610af2-5c05-477d-a25b-5e8429c6a38c\") " pod="openshift-marketplace/community-operators-b6m5q" Dec 08 22:37:28 crc kubenswrapper[4791]: I1208 22:37:28.344853 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b610af2-5c05-477d-a25b-5e8429c6a38c-utilities\") pod \"community-operators-b6m5q\" (UID: \"2b610af2-5c05-477d-a25b-5e8429c6a38c\") " pod="openshift-marketplace/community-operators-b6m5q" Dec 08 22:37:28 crc kubenswrapper[4791]: I1208 22:37:28.364213 4791 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-qsp2k\" (UniqueName: \"kubernetes.io/projected/2b610af2-5c05-477d-a25b-5e8429c6a38c-kube-api-access-qsp2k\") pod \"community-operators-b6m5q\" (UID: \"2b610af2-5c05-477d-a25b-5e8429c6a38c\") " pod="openshift-marketplace/community-operators-b6m5q" Dec 08 22:37:28 crc kubenswrapper[4791]: I1208 22:37:28.404738 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-b6m5q" Dec 08 22:37:28 crc kubenswrapper[4791]: I1208 22:37:28.951051 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-b6m5q"] Dec 08 22:37:29 crc kubenswrapper[4791]: I1208 22:37:29.783427 4791 generic.go:334] "Generic (PLEG): container finished" podID="2b610af2-5c05-477d-a25b-5e8429c6a38c" containerID="f316e5503be4b3aca2fff81ea7f8789218c8b9dd6434b831e9d12f045f4b9f2b" exitCode=0 Dec 08 22:37:29 crc kubenswrapper[4791]: I1208 22:37:29.783497 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b6m5q" event={"ID":"2b610af2-5c05-477d-a25b-5e8429c6a38c","Type":"ContainerDied","Data":"f316e5503be4b3aca2fff81ea7f8789218c8b9dd6434b831e9d12f045f4b9f2b"} Dec 08 22:37:29 crc kubenswrapper[4791]: I1208 22:37:29.783909 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b6m5q" event={"ID":"2b610af2-5c05-477d-a25b-5e8429c6a38c","Type":"ContainerStarted","Data":"e8923ff9fa4d8e46a431381b732820174c13e085c9d10cdfa6fca13e482ddc78"} Dec 08 22:37:31 crc kubenswrapper[4791]: I1208 22:37:31.805403 4791 generic.go:334] "Generic (PLEG): container finished" podID="2b610af2-5c05-477d-a25b-5e8429c6a38c" containerID="6ca046efb571b7e809da1861b9dffa4730880bab0fb7fc55c284eae458014d7f" exitCode=0 Dec 08 22:37:31 crc kubenswrapper[4791]: I1208 22:37:31.805463 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b6m5q" event={"ID":"2b610af2-5c05-477d-a25b-5e8429c6a38c","Type":"ContainerDied","Data":"6ca046efb571b7e809da1861b9dffa4730880bab0fb7fc55c284eae458014d7f"} Dec 08 22:37:33 crc kubenswrapper[4791]: I1208 22:37:33.839915 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b6m5q" event={"ID":"2b610af2-5c05-477d-a25b-5e8429c6a38c","Type":"ContainerStarted","Data":"d638f354433479bd88ffba4cbf74bedcdf302015475e21826159d2bb50f14943"} Dec 08 22:37:33 crc kubenswrapper[4791]: I1208 22:37:33.858940 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-b6m5q" podStartSLOduration=3.140784057 podStartE2EDuration="5.858917329s" podCreationTimestamp="2025-12-08 22:37:28 +0000 UTC" firstStartedPulling="2025-12-08 22:37:29.787296131 +0000 UTC m=+4726.486054516" lastFinishedPulling="2025-12-08 22:37:32.505429443 +0000 UTC m=+4729.204187788" observedRunningTime="2025-12-08 22:37:33.854249393 +0000 UTC m=+4730.553007778" watchObservedRunningTime="2025-12-08 22:37:33.858917329 +0000 UTC m=+4730.557675674" Dec 08 22:37:35 crc kubenswrapper[4791]: I1208 22:37:35.597565 4791 scope.go:117] "RemoveContainer" containerID="c94dd952fbd13f9f47ab406886e78f89cdc92ef73e8da6c751e274a3aae30a9d" Dec 08 22:37:35 crc kubenswrapper[4791]: E1208 22:37:35.598166 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager 
pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:37:38 crc kubenswrapper[4791]: I1208 22:37:38.405912 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-b6m5q" Dec 08 22:37:38 crc kubenswrapper[4791]: I1208 22:37:38.406256 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-b6m5q" Dec 08 22:37:38 crc kubenswrapper[4791]: I1208 22:37:38.475614 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-b6m5q" Dec 08 22:37:38 crc kubenswrapper[4791]: I1208 22:37:38.933406 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-b6m5q" Dec 08 22:37:38 crc kubenswrapper[4791]: I1208 22:37:38.987299 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-b6m5q"] Dec 08 22:37:40 crc kubenswrapper[4791]: I1208 22:37:40.910620 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-b6m5q" podUID="2b610af2-5c05-477d-a25b-5e8429c6a38c" containerName="registry-server" containerID="cri-o://d638f354433479bd88ffba4cbf74bedcdf302015475e21826159d2bb50f14943" gracePeriod=2 Dec 08 22:37:41 crc kubenswrapper[4791]: I1208 22:37:41.517784 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-tg2v6"] Dec 08 22:37:41 crc kubenswrapper[4791]: I1208 22:37:41.521814 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tg2v6" Dec 08 22:37:41 crc kubenswrapper[4791]: I1208 22:37:41.534155 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tg2v6"] Dec 08 22:37:41 crc kubenswrapper[4791]: I1208 22:37:41.648557 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca48b62d-ffb4-4dfe-bf79-6124995616ec-catalog-content\") pod \"redhat-operators-tg2v6\" (UID: \"ca48b62d-ffb4-4dfe-bf79-6124995616ec\") " pod="openshift-marketplace/redhat-operators-tg2v6" Dec 08 22:37:41 crc kubenswrapper[4791]: I1208 22:37:41.648947 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-57jxr\" (UniqueName: \"kubernetes.io/projected/ca48b62d-ffb4-4dfe-bf79-6124995616ec-kube-api-access-57jxr\") pod \"redhat-operators-tg2v6\" (UID: \"ca48b62d-ffb4-4dfe-bf79-6124995616ec\") " pod="openshift-marketplace/redhat-operators-tg2v6" Dec 08 22:37:41 crc kubenswrapper[4791]: I1208 22:37:41.649212 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca48b62d-ffb4-4dfe-bf79-6124995616ec-utilities\") pod \"redhat-operators-tg2v6\" (UID: \"ca48b62d-ffb4-4dfe-bf79-6124995616ec\") " pod="openshift-marketplace/redhat-operators-tg2v6" Dec 08 22:37:41 crc kubenswrapper[4791]: I1208 22:37:41.751392 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca48b62d-ffb4-4dfe-bf79-6124995616ec-catalog-content\") pod \"redhat-operators-tg2v6\" (UID: \"ca48b62d-ffb4-4dfe-bf79-6124995616ec\") " pod="openshift-marketplace/redhat-operators-tg2v6" Dec 08 22:37:41 crc kubenswrapper[4791]: I1208 22:37:41.751497 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-57jxr\" (UniqueName: \"kubernetes.io/projected/ca48b62d-ffb4-4dfe-bf79-6124995616ec-kube-api-access-57jxr\") pod \"redhat-operators-tg2v6\" (UID: \"ca48b62d-ffb4-4dfe-bf79-6124995616ec\") " pod="openshift-marketplace/redhat-operators-tg2v6" Dec 08 22:37:41 crc kubenswrapper[4791]: I1208 22:37:41.751613 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca48b62d-ffb4-4dfe-bf79-6124995616ec-utilities\") pod \"redhat-operators-tg2v6\" (UID: \"ca48b62d-ffb4-4dfe-bf79-6124995616ec\") " pod="openshift-marketplace/redhat-operators-tg2v6" Dec 08 22:37:41 crc kubenswrapper[4791]: I1208 22:37:41.752587 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca48b62d-ffb4-4dfe-bf79-6124995616ec-catalog-content\") pod \"redhat-operators-tg2v6\" (UID: \"ca48b62d-ffb4-4dfe-bf79-6124995616ec\") " pod="openshift-marketplace/redhat-operators-tg2v6" Dec 08 22:37:41 crc kubenswrapper[4791]: I1208 22:37:41.752658 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca48b62d-ffb4-4dfe-bf79-6124995616ec-utilities\") pod \"redhat-operators-tg2v6\" (UID: \"ca48b62d-ffb4-4dfe-bf79-6124995616ec\") " pod="openshift-marketplace/redhat-operators-tg2v6" Dec 08 22:37:41 crc kubenswrapper[4791]: I1208 22:37:41.922697 4791 generic.go:334] "Generic (PLEG): container finished" 
podID="2b610af2-5c05-477d-a25b-5e8429c6a38c" containerID="d638f354433479bd88ffba4cbf74bedcdf302015475e21826159d2bb50f14943" exitCode=0 Dec 08 22:37:41 crc kubenswrapper[4791]: I1208 22:37:41.922876 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b6m5q" event={"ID":"2b610af2-5c05-477d-a25b-5e8429c6a38c","Type":"ContainerDied","Data":"d638f354433479bd88ffba4cbf74bedcdf302015475e21826159d2bb50f14943"} Dec 08 22:37:41 crc kubenswrapper[4791]: I1208 22:37:41.923094 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b6m5q" event={"ID":"2b610af2-5c05-477d-a25b-5e8429c6a38c","Type":"ContainerDied","Data":"e8923ff9fa4d8e46a431381b732820174c13e085c9d10cdfa6fca13e482ddc78"} Dec 08 22:37:41 crc kubenswrapper[4791]: I1208 22:37:41.923112 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e8923ff9fa4d8e46a431381b732820174c13e085c9d10cdfa6fca13e482ddc78" Dec 08 22:37:42 crc kubenswrapper[4791]: I1208 22:37:42.047657 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-57jxr\" (UniqueName: \"kubernetes.io/projected/ca48b62d-ffb4-4dfe-bf79-6124995616ec-kube-api-access-57jxr\") pod \"redhat-operators-tg2v6\" (UID: \"ca48b62d-ffb4-4dfe-bf79-6124995616ec\") " pod="openshift-marketplace/redhat-operators-tg2v6" Dec 08 22:37:42 crc kubenswrapper[4791]: I1208 22:37:42.179380 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-b6m5q" Dec 08 22:37:42 crc kubenswrapper[4791]: I1208 22:37:42.196109 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tg2v6" Dec 08 22:37:42 crc kubenswrapper[4791]: I1208 22:37:42.365838 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b610af2-5c05-477d-a25b-5e8429c6a38c-utilities\") pod \"2b610af2-5c05-477d-a25b-5e8429c6a38c\" (UID: \"2b610af2-5c05-477d-a25b-5e8429c6a38c\") " Dec 08 22:37:42 crc kubenswrapper[4791]: I1208 22:37:42.366160 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qsp2k\" (UniqueName: \"kubernetes.io/projected/2b610af2-5c05-477d-a25b-5e8429c6a38c-kube-api-access-qsp2k\") pod \"2b610af2-5c05-477d-a25b-5e8429c6a38c\" (UID: \"2b610af2-5c05-477d-a25b-5e8429c6a38c\") " Dec 08 22:37:42 crc kubenswrapper[4791]: I1208 22:37:42.366338 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b610af2-5c05-477d-a25b-5e8429c6a38c-catalog-content\") pod \"2b610af2-5c05-477d-a25b-5e8429c6a38c\" (UID: \"2b610af2-5c05-477d-a25b-5e8429c6a38c\") " Dec 08 22:37:42 crc kubenswrapper[4791]: I1208 22:37:42.366912 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2b610af2-5c05-477d-a25b-5e8429c6a38c-utilities" (OuterVolumeSpecName: "utilities") pod "2b610af2-5c05-477d-a25b-5e8429c6a38c" (UID: "2b610af2-5c05-477d-a25b-5e8429c6a38c"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:37:42 crc kubenswrapper[4791]: I1208 22:37:42.370363 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b610af2-5c05-477d-a25b-5e8429c6a38c-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 22:37:42 crc kubenswrapper[4791]: I1208 22:37:42.374843 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b610af2-5c05-477d-a25b-5e8429c6a38c-kube-api-access-qsp2k" (OuterVolumeSpecName: "kube-api-access-qsp2k") pod "2b610af2-5c05-477d-a25b-5e8429c6a38c" (UID: "2b610af2-5c05-477d-a25b-5e8429c6a38c"). InnerVolumeSpecName "kube-api-access-qsp2k". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 22:37:42 crc kubenswrapper[4791]: I1208 22:37:42.447551 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2b610af2-5c05-477d-a25b-5e8429c6a38c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2b610af2-5c05-477d-a25b-5e8429c6a38c" (UID: "2b610af2-5c05-477d-a25b-5e8429c6a38c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:37:42 crc kubenswrapper[4791]: I1208 22:37:42.472399 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qsp2k\" (UniqueName: \"kubernetes.io/projected/2b610af2-5c05-477d-a25b-5e8429c6a38c-kube-api-access-qsp2k\") on node \"crc\" DevicePath \"\"" Dec 08 22:37:42 crc kubenswrapper[4791]: I1208 22:37:42.472436 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b610af2-5c05-477d-a25b-5e8429c6a38c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 22:37:42 crc kubenswrapper[4791]: I1208 22:37:42.703674 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tg2v6"] Dec 08 22:37:42 crc kubenswrapper[4791]: W1208 22:37:42.705199 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podca48b62d_ffb4_4dfe_bf79_6124995616ec.slice/crio-16ad9ba4ef89fbf4c64314c6e0d4972b751445fa411d7d4c60c93a3c8abab167 WatchSource:0}: Error finding container 16ad9ba4ef89fbf4c64314c6e0d4972b751445fa411d7d4c60c93a3c8abab167: Status 404 returned error can't find the container with id 16ad9ba4ef89fbf4c64314c6e0d4972b751445fa411d7d4c60c93a3c8abab167 Dec 08 22:37:42 crc kubenswrapper[4791]: I1208 22:37:42.931658 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tg2v6" event={"ID":"ca48b62d-ffb4-4dfe-bf79-6124995616ec","Type":"ContainerStarted","Data":"16ad9ba4ef89fbf4c64314c6e0d4972b751445fa411d7d4c60c93a3c8abab167"} Dec 08 22:37:42 crc kubenswrapper[4791]: I1208 22:37:42.931690 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-b6m5q" Dec 08 22:37:42 crc kubenswrapper[4791]: I1208 22:37:42.964915 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-b6m5q"] Dec 08 22:37:42 crc kubenswrapper[4791]: I1208 22:37:42.975151 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-b6m5q"] Dec 08 22:37:43 crc kubenswrapper[4791]: I1208 22:37:43.619560 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2b610af2-5c05-477d-a25b-5e8429c6a38c" path="/var/lib/kubelet/pods/2b610af2-5c05-477d-a25b-5e8429c6a38c/volumes" Dec 08 22:37:43 crc kubenswrapper[4791]: I1208 22:37:43.943046 4791 generic.go:334] "Generic (PLEG): container finished" podID="ca48b62d-ffb4-4dfe-bf79-6124995616ec" containerID="53f258b0c99fd132f4b7dc51a2a4ffe196651bcdb0bd169217e732acbd74f6ce" exitCode=0 Dec 08 22:37:43 crc kubenswrapper[4791]: I1208 22:37:43.943109 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tg2v6" event={"ID":"ca48b62d-ffb4-4dfe-bf79-6124995616ec","Type":"ContainerDied","Data":"53f258b0c99fd132f4b7dc51a2a4ffe196651bcdb0bd169217e732acbd74f6ce"} Dec 08 22:37:44 crc kubenswrapper[4791]: I1208 22:37:44.953328 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tg2v6" event={"ID":"ca48b62d-ffb4-4dfe-bf79-6124995616ec","Type":"ContainerStarted","Data":"410be9b5d2f32f5b1c04a908fe40827421ee9adce58b9c213e0aeeabe6d25564"} Dec 08 22:37:48 crc kubenswrapper[4791]: I1208 22:37:48.598556 4791 scope.go:117] "RemoveContainer" containerID="c94dd952fbd13f9f47ab406886e78f89cdc92ef73e8da6c751e274a3aae30a9d" Dec 08 22:37:48 crc kubenswrapper[4791]: E1208 22:37:48.599383 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:37:49 crc kubenswrapper[4791]: I1208 22:37:49.011294 4791 generic.go:334] "Generic (PLEG): container finished" podID="ca48b62d-ffb4-4dfe-bf79-6124995616ec" containerID="410be9b5d2f32f5b1c04a908fe40827421ee9adce58b9c213e0aeeabe6d25564" exitCode=0 Dec 08 22:37:49 crc kubenswrapper[4791]: I1208 22:37:49.011355 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tg2v6" event={"ID":"ca48b62d-ffb4-4dfe-bf79-6124995616ec","Type":"ContainerDied","Data":"410be9b5d2f32f5b1c04a908fe40827421ee9adce58b9c213e0aeeabe6d25564"} Dec 08 22:37:49 crc kubenswrapper[4791]: I1208 22:37:49.014214 4791 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 08 22:37:49 crc kubenswrapper[4791]: I1208 22:37:49.930625 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-qcdjc"] Dec 08 22:37:49 crc kubenswrapper[4791]: E1208 22:37:49.931604 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b610af2-5c05-477d-a25b-5e8429c6a38c" containerName="extract-utilities" Dec 08 22:37:49 crc kubenswrapper[4791]: I1208 22:37:49.931618 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b610af2-5c05-477d-a25b-5e8429c6a38c" containerName="extract-utilities" Dec 08 
22:37:49 crc kubenswrapper[4791]: E1208 22:37:49.931629 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b610af2-5c05-477d-a25b-5e8429c6a38c" containerName="extract-content" Dec 08 22:37:49 crc kubenswrapper[4791]: I1208 22:37:49.931635 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b610af2-5c05-477d-a25b-5e8429c6a38c" containerName="extract-content" Dec 08 22:37:49 crc kubenswrapper[4791]: E1208 22:37:49.931656 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b610af2-5c05-477d-a25b-5e8429c6a38c" containerName="registry-server" Dec 08 22:37:49 crc kubenswrapper[4791]: I1208 22:37:49.931663 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b610af2-5c05-477d-a25b-5e8429c6a38c" containerName="registry-server" Dec 08 22:37:49 crc kubenswrapper[4791]: I1208 22:37:49.931933 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b610af2-5c05-477d-a25b-5e8429c6a38c" containerName="registry-server" Dec 08 22:37:49 crc kubenswrapper[4791]: I1208 22:37:49.937828 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qcdjc" Dec 08 22:37:49 crc kubenswrapper[4791]: I1208 22:37:49.957132 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qcdjc"] Dec 08 22:37:50 crc kubenswrapper[4791]: I1208 22:37:50.026221 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tg2v6" event={"ID":"ca48b62d-ffb4-4dfe-bf79-6124995616ec","Type":"ContainerStarted","Data":"fcf6f7cb341eb07480757bbeb15bb78ef503b402374691a7ec841836f06adfa3"} Dec 08 22:37:50 crc kubenswrapper[4791]: I1208 22:37:50.047172 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-tg2v6" podStartSLOduration=3.558742891 podStartE2EDuration="9.047155931s" podCreationTimestamp="2025-12-08 22:37:41 +0000 UTC" firstStartedPulling="2025-12-08 22:37:43.945504324 +0000 UTC m=+4740.644262669" lastFinishedPulling="2025-12-08 22:37:49.433917364 +0000 UTC m=+4746.132675709" observedRunningTime="2025-12-08 22:37:50.044902895 +0000 UTC m=+4746.743661240" watchObservedRunningTime="2025-12-08 22:37:50.047155931 +0000 UTC m=+4746.745914286" Dec 08 22:37:50 crc kubenswrapper[4791]: I1208 22:37:50.089800 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qgcfh\" (UniqueName: \"kubernetes.io/projected/845fe551-9f2e-4491-bf66-6a9de3450dab-kube-api-access-qgcfh\") pod \"redhat-marketplace-qcdjc\" (UID: \"845fe551-9f2e-4491-bf66-6a9de3450dab\") " pod="openshift-marketplace/redhat-marketplace-qcdjc" Dec 08 22:37:50 crc kubenswrapper[4791]: I1208 22:37:50.091186 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/845fe551-9f2e-4491-bf66-6a9de3450dab-utilities\") pod \"redhat-marketplace-qcdjc\" (UID: \"845fe551-9f2e-4491-bf66-6a9de3450dab\") " pod="openshift-marketplace/redhat-marketplace-qcdjc" Dec 08 22:37:50 crc kubenswrapper[4791]: I1208 22:37:50.091396 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/845fe551-9f2e-4491-bf66-6a9de3450dab-catalog-content\") pod \"redhat-marketplace-qcdjc\" (UID: \"845fe551-9f2e-4491-bf66-6a9de3450dab\") " pod="openshift-marketplace/redhat-marketplace-qcdjc" Dec 
08 22:37:50 crc kubenswrapper[4791]: I1208 22:37:50.193911 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/845fe551-9f2e-4491-bf66-6a9de3450dab-utilities\") pod \"redhat-marketplace-qcdjc\" (UID: \"845fe551-9f2e-4491-bf66-6a9de3450dab\") " pod="openshift-marketplace/redhat-marketplace-qcdjc" Dec 08 22:37:50 crc kubenswrapper[4791]: I1208 22:37:50.193990 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/845fe551-9f2e-4491-bf66-6a9de3450dab-catalog-content\") pod \"redhat-marketplace-qcdjc\" (UID: \"845fe551-9f2e-4491-bf66-6a9de3450dab\") " pod="openshift-marketplace/redhat-marketplace-qcdjc" Dec 08 22:37:50 crc kubenswrapper[4791]: I1208 22:37:50.194043 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qgcfh\" (UniqueName: \"kubernetes.io/projected/845fe551-9f2e-4491-bf66-6a9de3450dab-kube-api-access-qgcfh\") pod \"redhat-marketplace-qcdjc\" (UID: \"845fe551-9f2e-4491-bf66-6a9de3450dab\") " pod="openshift-marketplace/redhat-marketplace-qcdjc" Dec 08 22:37:50 crc kubenswrapper[4791]: I1208 22:37:50.194582 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/845fe551-9f2e-4491-bf66-6a9de3450dab-utilities\") pod \"redhat-marketplace-qcdjc\" (UID: \"845fe551-9f2e-4491-bf66-6a9de3450dab\") " pod="openshift-marketplace/redhat-marketplace-qcdjc" Dec 08 22:37:50 crc kubenswrapper[4791]: I1208 22:37:50.194675 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/845fe551-9f2e-4491-bf66-6a9de3450dab-catalog-content\") pod \"redhat-marketplace-qcdjc\" (UID: \"845fe551-9f2e-4491-bf66-6a9de3450dab\") " pod="openshift-marketplace/redhat-marketplace-qcdjc" Dec 08 22:37:50 crc kubenswrapper[4791]: I1208 22:37:50.214635 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qgcfh\" (UniqueName: \"kubernetes.io/projected/845fe551-9f2e-4491-bf66-6a9de3450dab-kube-api-access-qgcfh\") pod \"redhat-marketplace-qcdjc\" (UID: \"845fe551-9f2e-4491-bf66-6a9de3450dab\") " pod="openshift-marketplace/redhat-marketplace-qcdjc" Dec 08 22:37:50 crc kubenswrapper[4791]: I1208 22:37:50.270790 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qcdjc" Dec 08 22:37:50 crc kubenswrapper[4791]: I1208 22:37:50.774870 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qcdjc"] Dec 08 22:37:51 crc kubenswrapper[4791]: I1208 22:37:51.036603 4791 generic.go:334] "Generic (PLEG): container finished" podID="845fe551-9f2e-4491-bf66-6a9de3450dab" containerID="36e67323d3fd1b86fee73fdd310f852a914c9600c9a6d7413efefb94390b3b56" exitCode=0 Dec 08 22:37:51 crc kubenswrapper[4791]: I1208 22:37:51.036680 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qcdjc" event={"ID":"845fe551-9f2e-4491-bf66-6a9de3450dab","Type":"ContainerDied","Data":"36e67323d3fd1b86fee73fdd310f852a914c9600c9a6d7413efefb94390b3b56"} Dec 08 22:37:51 crc kubenswrapper[4791]: I1208 22:37:51.037055 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qcdjc" event={"ID":"845fe551-9f2e-4491-bf66-6a9de3450dab","Type":"ContainerStarted","Data":"be6ad778b87ab80a7fa3ec837a43ba147d2ce55457d04ff3ed789310e2efa155"} Dec 08 22:37:52 crc kubenswrapper[4791]: I1208 22:37:52.049484 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qcdjc" event={"ID":"845fe551-9f2e-4491-bf66-6a9de3450dab","Type":"ContainerStarted","Data":"257bbff871e1b797b8edbc08dea5c34a2d0909d6024ff692aaf98407025360b7"} Dec 08 22:37:52 crc kubenswrapper[4791]: I1208 22:37:52.196762 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-tg2v6" Dec 08 22:37:52 crc kubenswrapper[4791]: I1208 22:37:52.196865 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-tg2v6" Dec 08 22:37:53 crc kubenswrapper[4791]: I1208 22:37:53.062813 4791 generic.go:334] "Generic (PLEG): container finished" podID="845fe551-9f2e-4491-bf66-6a9de3450dab" containerID="257bbff871e1b797b8edbc08dea5c34a2d0909d6024ff692aaf98407025360b7" exitCode=0 Dec 08 22:37:53 crc kubenswrapper[4791]: I1208 22:37:53.062846 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qcdjc" event={"ID":"845fe551-9f2e-4491-bf66-6a9de3450dab","Type":"ContainerDied","Data":"257bbff871e1b797b8edbc08dea5c34a2d0909d6024ff692aaf98407025360b7"} Dec 08 22:37:53 crc kubenswrapper[4791]: I1208 22:37:53.245265 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-tg2v6" podUID="ca48b62d-ffb4-4dfe-bf79-6124995616ec" containerName="registry-server" probeResult="failure" output=< Dec 08 22:37:53 crc kubenswrapper[4791]: timeout: failed to connect service ":50051" within 1s Dec 08 22:37:53 crc kubenswrapper[4791]: > Dec 08 22:37:54 crc kubenswrapper[4791]: I1208 22:37:54.074481 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qcdjc" event={"ID":"845fe551-9f2e-4491-bf66-6a9de3450dab","Type":"ContainerStarted","Data":"b3e9c8d810451b924c8363c2c061d8d48c0c8ea681fbdde937e559496cfc5c2e"} Dec 08 22:37:54 crc kubenswrapper[4791]: I1208 22:37:54.100386 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-qcdjc" podStartSLOduration=2.507898613 podStartE2EDuration="5.100365808s" podCreationTimestamp="2025-12-08 22:37:49 +0000 UTC" firstStartedPulling="2025-12-08 22:37:51.038300963 +0000 UTC m=+4747.737059308" 
lastFinishedPulling="2025-12-08 22:37:53.630768158 +0000 UTC m=+4750.329526503" observedRunningTime="2025-12-08 22:37:54.091767543 +0000 UTC m=+4750.790525908" watchObservedRunningTime="2025-12-08 22:37:54.100365808 +0000 UTC m=+4750.799124143" Dec 08 22:38:00 crc kubenswrapper[4791]: I1208 22:38:00.271143 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-qcdjc" Dec 08 22:38:00 crc kubenswrapper[4791]: I1208 22:38:00.271649 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-qcdjc" Dec 08 22:38:00 crc kubenswrapper[4791]: I1208 22:38:00.321843 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-qcdjc" Dec 08 22:38:00 crc kubenswrapper[4791]: I1208 22:38:00.597623 4791 scope.go:117] "RemoveContainer" containerID="c94dd952fbd13f9f47ab406886e78f89cdc92ef73e8da6c751e274a3aae30a9d" Dec 08 22:38:00 crc kubenswrapper[4791]: E1208 22:38:00.597949 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:38:01 crc kubenswrapper[4791]: I1208 22:38:01.212457 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-qcdjc" Dec 08 22:38:01 crc kubenswrapper[4791]: I1208 22:38:01.263516 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qcdjc"] Dec 08 22:38:02 crc kubenswrapper[4791]: I1208 22:38:02.255737 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-tg2v6" Dec 08 22:38:02 crc kubenswrapper[4791]: I1208 22:38:02.310950 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-tg2v6" Dec 08 22:38:02 crc kubenswrapper[4791]: I1208 22:38:02.959999 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tg2v6"] Dec 08 22:38:03 crc kubenswrapper[4791]: I1208 22:38:03.186547 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-qcdjc" podUID="845fe551-9f2e-4491-bf66-6a9de3450dab" containerName="registry-server" containerID="cri-o://b3e9c8d810451b924c8363c2c061d8d48c0c8ea681fbdde937e559496cfc5c2e" gracePeriod=2 Dec 08 22:38:03 crc kubenswrapper[4791]: I1208 22:38:03.715633 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qcdjc" Dec 08 22:38:03 crc kubenswrapper[4791]: I1208 22:38:03.830147 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/845fe551-9f2e-4491-bf66-6a9de3450dab-catalog-content\") pod \"845fe551-9f2e-4491-bf66-6a9de3450dab\" (UID: \"845fe551-9f2e-4491-bf66-6a9de3450dab\") " Dec 08 22:38:03 crc kubenswrapper[4791]: I1208 22:38:03.830765 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/845fe551-9f2e-4491-bf66-6a9de3450dab-utilities\") pod \"845fe551-9f2e-4491-bf66-6a9de3450dab\" (UID: \"845fe551-9f2e-4491-bf66-6a9de3450dab\") " Dec 08 22:38:03 crc kubenswrapper[4791]: I1208 22:38:03.830974 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qgcfh\" (UniqueName: \"kubernetes.io/projected/845fe551-9f2e-4491-bf66-6a9de3450dab-kube-api-access-qgcfh\") pod \"845fe551-9f2e-4491-bf66-6a9de3450dab\" (UID: \"845fe551-9f2e-4491-bf66-6a9de3450dab\") " Dec 08 22:38:03 crc kubenswrapper[4791]: I1208 22:38:03.831589 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/845fe551-9f2e-4491-bf66-6a9de3450dab-utilities" (OuterVolumeSpecName: "utilities") pod "845fe551-9f2e-4491-bf66-6a9de3450dab" (UID: "845fe551-9f2e-4491-bf66-6a9de3450dab"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:38:03 crc kubenswrapper[4791]: I1208 22:38:03.836792 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/845fe551-9f2e-4491-bf66-6a9de3450dab-kube-api-access-qgcfh" (OuterVolumeSpecName: "kube-api-access-qgcfh") pod "845fe551-9f2e-4491-bf66-6a9de3450dab" (UID: "845fe551-9f2e-4491-bf66-6a9de3450dab"). InnerVolumeSpecName "kube-api-access-qgcfh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 22:38:03 crc kubenswrapper[4791]: I1208 22:38:03.850470 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/845fe551-9f2e-4491-bf66-6a9de3450dab-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "845fe551-9f2e-4491-bf66-6a9de3450dab" (UID: "845fe551-9f2e-4491-bf66-6a9de3450dab"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:38:03 crc kubenswrapper[4791]: I1208 22:38:03.934528 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/845fe551-9f2e-4491-bf66-6a9de3450dab-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 22:38:03 crc kubenswrapper[4791]: I1208 22:38:03.934569 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/845fe551-9f2e-4491-bf66-6a9de3450dab-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 22:38:03 crc kubenswrapper[4791]: I1208 22:38:03.934582 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qgcfh\" (UniqueName: \"kubernetes.io/projected/845fe551-9f2e-4491-bf66-6a9de3450dab-kube-api-access-qgcfh\") on node \"crc\" DevicePath \"\"" Dec 08 22:38:04 crc kubenswrapper[4791]: I1208 22:38:04.200383 4791 generic.go:334] "Generic (PLEG): container finished" podID="845fe551-9f2e-4491-bf66-6a9de3450dab" containerID="b3e9c8d810451b924c8363c2c061d8d48c0c8ea681fbdde937e559496cfc5c2e" exitCode=0 Dec 08 22:38:04 crc kubenswrapper[4791]: I1208 22:38:04.200532 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qcdjc" Dec 08 22:38:04 crc kubenswrapper[4791]: I1208 22:38:04.200530 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qcdjc" event={"ID":"845fe551-9f2e-4491-bf66-6a9de3450dab","Type":"ContainerDied","Data":"b3e9c8d810451b924c8363c2c061d8d48c0c8ea681fbdde937e559496cfc5c2e"} Dec 08 22:38:04 crc kubenswrapper[4791]: I1208 22:38:04.200627 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qcdjc" event={"ID":"845fe551-9f2e-4491-bf66-6a9de3450dab","Type":"ContainerDied","Data":"be6ad778b87ab80a7fa3ec837a43ba147d2ce55457d04ff3ed789310e2efa155"} Dec 08 22:38:04 crc kubenswrapper[4791]: I1208 22:38:04.200634 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-tg2v6" podUID="ca48b62d-ffb4-4dfe-bf79-6124995616ec" containerName="registry-server" containerID="cri-o://fcf6f7cb341eb07480757bbeb15bb78ef503b402374691a7ec841836f06adfa3" gracePeriod=2 Dec 08 22:38:04 crc kubenswrapper[4791]: I1208 22:38:04.200667 4791 scope.go:117] "RemoveContainer" containerID="b3e9c8d810451b924c8363c2c061d8d48c0c8ea681fbdde937e559496cfc5c2e" Dec 08 22:38:04 crc kubenswrapper[4791]: I1208 22:38:04.240154 4791 scope.go:117] "RemoveContainer" containerID="257bbff871e1b797b8edbc08dea5c34a2d0909d6024ff692aaf98407025360b7" Dec 08 22:38:04 crc kubenswrapper[4791]: I1208 22:38:04.247014 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qcdjc"] Dec 08 22:38:04 crc kubenswrapper[4791]: I1208 22:38:04.269417 4791 scope.go:117] "RemoveContainer" containerID="36e67323d3fd1b86fee73fdd310f852a914c9600c9a6d7413efefb94390b3b56" Dec 08 22:38:04 crc kubenswrapper[4791]: I1208 22:38:04.270056 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-qcdjc"] Dec 08 22:38:04 crc kubenswrapper[4791]: I1208 22:38:04.471538 4791 scope.go:117] "RemoveContainer" containerID="b3e9c8d810451b924c8363c2c061d8d48c0c8ea681fbdde937e559496cfc5c2e" Dec 08 22:38:04 crc kubenswrapper[4791]: E1208 22:38:04.472398 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = 
could not find container \"b3e9c8d810451b924c8363c2c061d8d48c0c8ea681fbdde937e559496cfc5c2e\": container with ID starting with b3e9c8d810451b924c8363c2c061d8d48c0c8ea681fbdde937e559496cfc5c2e not found: ID does not exist" containerID="b3e9c8d810451b924c8363c2c061d8d48c0c8ea681fbdde937e559496cfc5c2e" Dec 08 22:38:04 crc kubenswrapper[4791]: I1208 22:38:04.472474 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3e9c8d810451b924c8363c2c061d8d48c0c8ea681fbdde937e559496cfc5c2e"} err="failed to get container status \"b3e9c8d810451b924c8363c2c061d8d48c0c8ea681fbdde937e559496cfc5c2e\": rpc error: code = NotFound desc = could not find container \"b3e9c8d810451b924c8363c2c061d8d48c0c8ea681fbdde937e559496cfc5c2e\": container with ID starting with b3e9c8d810451b924c8363c2c061d8d48c0c8ea681fbdde937e559496cfc5c2e not found: ID does not exist" Dec 08 22:38:04 crc kubenswrapper[4791]: I1208 22:38:04.472509 4791 scope.go:117] "RemoveContainer" containerID="257bbff871e1b797b8edbc08dea5c34a2d0909d6024ff692aaf98407025360b7" Dec 08 22:38:04 crc kubenswrapper[4791]: E1208 22:38:04.473098 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"257bbff871e1b797b8edbc08dea5c34a2d0909d6024ff692aaf98407025360b7\": container with ID starting with 257bbff871e1b797b8edbc08dea5c34a2d0909d6024ff692aaf98407025360b7 not found: ID does not exist" containerID="257bbff871e1b797b8edbc08dea5c34a2d0909d6024ff692aaf98407025360b7" Dec 08 22:38:04 crc kubenswrapper[4791]: I1208 22:38:04.473137 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"257bbff871e1b797b8edbc08dea5c34a2d0909d6024ff692aaf98407025360b7"} err="failed to get container status \"257bbff871e1b797b8edbc08dea5c34a2d0909d6024ff692aaf98407025360b7\": rpc error: code = NotFound desc = could not find container \"257bbff871e1b797b8edbc08dea5c34a2d0909d6024ff692aaf98407025360b7\": container with ID starting with 257bbff871e1b797b8edbc08dea5c34a2d0909d6024ff692aaf98407025360b7 not found: ID does not exist" Dec 08 22:38:04 crc kubenswrapper[4791]: I1208 22:38:04.473166 4791 scope.go:117] "RemoveContainer" containerID="36e67323d3fd1b86fee73fdd310f852a914c9600c9a6d7413efefb94390b3b56" Dec 08 22:38:04 crc kubenswrapper[4791]: E1208 22:38:04.473449 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"36e67323d3fd1b86fee73fdd310f852a914c9600c9a6d7413efefb94390b3b56\": container with ID starting with 36e67323d3fd1b86fee73fdd310f852a914c9600c9a6d7413efefb94390b3b56 not found: ID does not exist" containerID="36e67323d3fd1b86fee73fdd310f852a914c9600c9a6d7413efefb94390b3b56" Dec 08 22:38:04 crc kubenswrapper[4791]: I1208 22:38:04.473477 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"36e67323d3fd1b86fee73fdd310f852a914c9600c9a6d7413efefb94390b3b56"} err="failed to get container status \"36e67323d3fd1b86fee73fdd310f852a914c9600c9a6d7413efefb94390b3b56\": rpc error: code = NotFound desc = could not find container \"36e67323d3fd1b86fee73fdd310f852a914c9600c9a6d7413efefb94390b3b56\": container with ID starting with 36e67323d3fd1b86fee73fdd310f852a914c9600c9a6d7413efefb94390b3b56 not found: ID does not exist" Dec 08 22:38:04 crc kubenswrapper[4791]: I1208 22:38:04.754043 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tg2v6" Dec 08 22:38:04 crc kubenswrapper[4791]: I1208 22:38:04.855967 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca48b62d-ffb4-4dfe-bf79-6124995616ec-utilities\") pod \"ca48b62d-ffb4-4dfe-bf79-6124995616ec\" (UID: \"ca48b62d-ffb4-4dfe-bf79-6124995616ec\") " Dec 08 22:38:04 crc kubenswrapper[4791]: I1208 22:38:04.856120 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca48b62d-ffb4-4dfe-bf79-6124995616ec-catalog-content\") pod \"ca48b62d-ffb4-4dfe-bf79-6124995616ec\" (UID: \"ca48b62d-ffb4-4dfe-bf79-6124995616ec\") " Dec 08 22:38:04 crc kubenswrapper[4791]: I1208 22:38:04.856194 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-57jxr\" (UniqueName: \"kubernetes.io/projected/ca48b62d-ffb4-4dfe-bf79-6124995616ec-kube-api-access-57jxr\") pod \"ca48b62d-ffb4-4dfe-bf79-6124995616ec\" (UID: \"ca48b62d-ffb4-4dfe-bf79-6124995616ec\") " Dec 08 22:38:04 crc kubenswrapper[4791]: I1208 22:38:04.856944 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ca48b62d-ffb4-4dfe-bf79-6124995616ec-utilities" (OuterVolumeSpecName: "utilities") pod "ca48b62d-ffb4-4dfe-bf79-6124995616ec" (UID: "ca48b62d-ffb4-4dfe-bf79-6124995616ec"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:38:04 crc kubenswrapper[4791]: I1208 22:38:04.863431 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca48b62d-ffb4-4dfe-bf79-6124995616ec-kube-api-access-57jxr" (OuterVolumeSpecName: "kube-api-access-57jxr") pod "ca48b62d-ffb4-4dfe-bf79-6124995616ec" (UID: "ca48b62d-ffb4-4dfe-bf79-6124995616ec"). InnerVolumeSpecName "kube-api-access-57jxr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 22:38:04 crc kubenswrapper[4791]: I1208 22:38:04.958906 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-57jxr\" (UniqueName: \"kubernetes.io/projected/ca48b62d-ffb4-4dfe-bf79-6124995616ec-kube-api-access-57jxr\") on node \"crc\" DevicePath \"\"" Dec 08 22:38:04 crc kubenswrapper[4791]: I1208 22:38:04.958942 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca48b62d-ffb4-4dfe-bf79-6124995616ec-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 22:38:04 crc kubenswrapper[4791]: I1208 22:38:04.984614 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ca48b62d-ffb4-4dfe-bf79-6124995616ec-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ca48b62d-ffb4-4dfe-bf79-6124995616ec" (UID: "ca48b62d-ffb4-4dfe-bf79-6124995616ec"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:38:05 crc kubenswrapper[4791]: I1208 22:38:05.061126 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca48b62d-ffb4-4dfe-bf79-6124995616ec-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 22:38:05 crc kubenswrapper[4791]: I1208 22:38:05.214865 4791 generic.go:334] "Generic (PLEG): container finished" podID="ca48b62d-ffb4-4dfe-bf79-6124995616ec" containerID="fcf6f7cb341eb07480757bbeb15bb78ef503b402374691a7ec841836f06adfa3" exitCode=0 Dec 08 22:38:05 crc kubenswrapper[4791]: I1208 22:38:05.214920 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tg2v6" event={"ID":"ca48b62d-ffb4-4dfe-bf79-6124995616ec","Type":"ContainerDied","Data":"fcf6f7cb341eb07480757bbeb15bb78ef503b402374691a7ec841836f06adfa3"} Dec 08 22:38:05 crc kubenswrapper[4791]: I1208 22:38:05.214965 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tg2v6" event={"ID":"ca48b62d-ffb4-4dfe-bf79-6124995616ec","Type":"ContainerDied","Data":"16ad9ba4ef89fbf4c64314c6e0d4972b751445fa411d7d4c60c93a3c8abab167"} Dec 08 22:38:05 crc kubenswrapper[4791]: I1208 22:38:05.214961 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tg2v6" Dec 08 22:38:05 crc kubenswrapper[4791]: I1208 22:38:05.215013 4791 scope.go:117] "RemoveContainer" containerID="fcf6f7cb341eb07480757bbeb15bb78ef503b402374691a7ec841836f06adfa3" Dec 08 22:38:05 crc kubenswrapper[4791]: I1208 22:38:05.248948 4791 scope.go:117] "RemoveContainer" containerID="410be9b5d2f32f5b1c04a908fe40827421ee9adce58b9c213e0aeeabe6d25564" Dec 08 22:38:05 crc kubenswrapper[4791]: I1208 22:38:05.251475 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tg2v6"] Dec 08 22:38:05 crc kubenswrapper[4791]: I1208 22:38:05.261440 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-tg2v6"] Dec 08 22:38:05 crc kubenswrapper[4791]: I1208 22:38:05.270799 4791 scope.go:117] "RemoveContainer" containerID="53f258b0c99fd132f4b7dc51a2a4ffe196651bcdb0bd169217e732acbd74f6ce" Dec 08 22:38:05 crc kubenswrapper[4791]: I1208 22:38:05.295451 4791 scope.go:117] "RemoveContainer" containerID="fcf6f7cb341eb07480757bbeb15bb78ef503b402374691a7ec841836f06adfa3" Dec 08 22:38:05 crc kubenswrapper[4791]: E1208 22:38:05.295999 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fcf6f7cb341eb07480757bbeb15bb78ef503b402374691a7ec841836f06adfa3\": container with ID starting with fcf6f7cb341eb07480757bbeb15bb78ef503b402374691a7ec841836f06adfa3 not found: ID does not exist" containerID="fcf6f7cb341eb07480757bbeb15bb78ef503b402374691a7ec841836f06adfa3" Dec 08 22:38:05 crc kubenswrapper[4791]: I1208 22:38:05.296064 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fcf6f7cb341eb07480757bbeb15bb78ef503b402374691a7ec841836f06adfa3"} err="failed to get container status \"fcf6f7cb341eb07480757bbeb15bb78ef503b402374691a7ec841836f06adfa3\": rpc error: code = NotFound desc = could not find container \"fcf6f7cb341eb07480757bbeb15bb78ef503b402374691a7ec841836f06adfa3\": container with ID starting with fcf6f7cb341eb07480757bbeb15bb78ef503b402374691a7ec841836f06adfa3 not found: ID does not exist" Dec 08 22:38:05 crc 
kubenswrapper[4791]: I1208 22:38:05.296148 4791 scope.go:117] "RemoveContainer" containerID="410be9b5d2f32f5b1c04a908fe40827421ee9adce58b9c213e0aeeabe6d25564" Dec 08 22:38:05 crc kubenswrapper[4791]: E1208 22:38:05.296536 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"410be9b5d2f32f5b1c04a908fe40827421ee9adce58b9c213e0aeeabe6d25564\": container with ID starting with 410be9b5d2f32f5b1c04a908fe40827421ee9adce58b9c213e0aeeabe6d25564 not found: ID does not exist" containerID="410be9b5d2f32f5b1c04a908fe40827421ee9adce58b9c213e0aeeabe6d25564" Dec 08 22:38:05 crc kubenswrapper[4791]: I1208 22:38:05.296562 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"410be9b5d2f32f5b1c04a908fe40827421ee9adce58b9c213e0aeeabe6d25564"} err="failed to get container status \"410be9b5d2f32f5b1c04a908fe40827421ee9adce58b9c213e0aeeabe6d25564\": rpc error: code = NotFound desc = could not find container \"410be9b5d2f32f5b1c04a908fe40827421ee9adce58b9c213e0aeeabe6d25564\": container with ID starting with 410be9b5d2f32f5b1c04a908fe40827421ee9adce58b9c213e0aeeabe6d25564 not found: ID does not exist" Dec 08 22:38:05 crc kubenswrapper[4791]: I1208 22:38:05.296575 4791 scope.go:117] "RemoveContainer" containerID="53f258b0c99fd132f4b7dc51a2a4ffe196651bcdb0bd169217e732acbd74f6ce" Dec 08 22:38:05 crc kubenswrapper[4791]: E1208 22:38:05.296850 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"53f258b0c99fd132f4b7dc51a2a4ffe196651bcdb0bd169217e732acbd74f6ce\": container with ID starting with 53f258b0c99fd132f4b7dc51a2a4ffe196651bcdb0bd169217e732acbd74f6ce not found: ID does not exist" containerID="53f258b0c99fd132f4b7dc51a2a4ffe196651bcdb0bd169217e732acbd74f6ce" Dec 08 22:38:05 crc kubenswrapper[4791]: I1208 22:38:05.296874 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53f258b0c99fd132f4b7dc51a2a4ffe196651bcdb0bd169217e732acbd74f6ce"} err="failed to get container status \"53f258b0c99fd132f4b7dc51a2a4ffe196651bcdb0bd169217e732acbd74f6ce\": rpc error: code = NotFound desc = could not find container \"53f258b0c99fd132f4b7dc51a2a4ffe196651bcdb0bd169217e732acbd74f6ce\": container with ID starting with 53f258b0c99fd132f4b7dc51a2a4ffe196651bcdb0bd169217e732acbd74f6ce not found: ID does not exist" Dec 08 22:38:05 crc kubenswrapper[4791]: I1208 22:38:05.615030 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="845fe551-9f2e-4491-bf66-6a9de3450dab" path="/var/lib/kubelet/pods/845fe551-9f2e-4491-bf66-6a9de3450dab/volumes" Dec 08 22:38:05 crc kubenswrapper[4791]: I1208 22:38:05.616156 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ca48b62d-ffb4-4dfe-bf79-6124995616ec" path="/var/lib/kubelet/pods/ca48b62d-ffb4-4dfe-bf79-6124995616ec/volumes" Dec 08 22:38:14 crc kubenswrapper[4791]: I1208 22:38:14.597998 4791 scope.go:117] "RemoveContainer" containerID="c94dd952fbd13f9f47ab406886e78f89cdc92ef73e8da6c751e274a3aae30a9d" Dec 08 22:38:14 crc kubenswrapper[4791]: E1208 22:38:14.598881 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" 
pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:38:28 crc kubenswrapper[4791]: I1208 22:38:28.599393 4791 scope.go:117] "RemoveContainer" containerID="c94dd952fbd13f9f47ab406886e78f89cdc92ef73e8da6c751e274a3aae30a9d" Dec 08 22:38:28 crc kubenswrapper[4791]: E1208 22:38:28.600147 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:38:40 crc kubenswrapper[4791]: I1208 22:38:40.598698 4791 scope.go:117] "RemoveContainer" containerID="c94dd952fbd13f9f47ab406886e78f89cdc92ef73e8da6c751e274a3aae30a9d" Dec 08 22:38:40 crc kubenswrapper[4791]: E1208 22:38:40.599896 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:38:51 crc kubenswrapper[4791]: I1208 22:38:51.598288 4791 scope.go:117] "RemoveContainer" containerID="c94dd952fbd13f9f47ab406886e78f89cdc92ef73e8da6c751e274a3aae30a9d" Dec 08 22:38:51 crc kubenswrapper[4791]: E1208 22:38:51.599039 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:39:03 crc kubenswrapper[4791]: I1208 22:39:03.605861 4791 scope.go:117] "RemoveContainer" containerID="c94dd952fbd13f9f47ab406886e78f89cdc92ef73e8da6c751e274a3aae30a9d" Dec 08 22:39:04 crc kubenswrapper[4791]: I1208 22:39:04.828741 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" event={"ID":"bcd8d669-4a40-401d-af99-651b840fb48b","Type":"ContainerStarted","Data":"d1d66e6afd76d88feb8ad14a9a90b9e7ce167c449b3742aef02e4f45a1de08c8"} Dec 08 22:39:04 crc kubenswrapper[4791]: I1208 22:39:04.829259 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 22:39:05 crc kubenswrapper[4791]: I1208 22:39:05.252026 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 22:39:05 crc kubenswrapper[4791]: I1208 22:39:05.253030 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 22:39:15 crc kubenswrapper[4791]: I1208 22:39:15.886550 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 22:39:35 crc kubenswrapper[4791]: I1208 22:39:35.251149 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 22:39:35 crc kubenswrapper[4791]: I1208 22:39:35.251813 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 22:40:05 crc kubenswrapper[4791]: I1208 22:40:05.251864 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 22:40:05 crc kubenswrapper[4791]: I1208 22:40:05.252380 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 22:40:05 crc kubenswrapper[4791]: I1208 22:40:05.252468 4791 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" Dec 08 22:40:05 crc kubenswrapper[4791]: I1208 22:40:05.253437 4791 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3014bf225e3c79e5f5e2055994e09b528c4a4f223d744e19e904fa5ce5728012"} pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 08 22:40:05 crc kubenswrapper[4791]: I1208 22:40:05.253496 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" containerID="cri-o://3014bf225e3c79e5f5e2055994e09b528c4a4f223d744e19e904fa5ce5728012" gracePeriod=600 Dec 08 22:40:05 crc kubenswrapper[4791]: I1208 22:40:05.442131 4791 generic.go:334] "Generic (PLEG): container finished" podID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerID="3014bf225e3c79e5f5e2055994e09b528c4a4f223d744e19e904fa5ce5728012" exitCode=0 Dec 08 22:40:05 crc kubenswrapper[4791]: I1208 22:40:05.442218 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" event={"ID":"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d","Type":"ContainerDied","Data":"3014bf225e3c79e5f5e2055994e09b528c4a4f223d744e19e904fa5ce5728012"} Dec 08 22:40:05 crc kubenswrapper[4791]: I1208 22:40:05.442515 4791 scope.go:117] "RemoveContainer" 
containerID="6e1af713c778add8df914ee1eae12a3f8026f2e235ea69a585cb2b153e56dc73" Dec 08 22:40:06 crc kubenswrapper[4791]: I1208 22:40:06.453377 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" event={"ID":"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d","Type":"ContainerStarted","Data":"70973cf87c58a479cd2e1f6477c1a3ec7af9883a93b55c6f4884d75fee584abd"} Dec 08 22:41:38 crc kubenswrapper[4791]: I1208 22:41:38.397529 4791 generic.go:334] "Generic (PLEG): container finished" podID="bcd8d669-4a40-401d-af99-651b840fb48b" containerID="d1d66e6afd76d88feb8ad14a9a90b9e7ce167c449b3742aef02e4f45a1de08c8" exitCode=1 Dec 08 22:41:38 crc kubenswrapper[4791]: I1208 22:41:38.397614 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" event={"ID":"bcd8d669-4a40-401d-af99-651b840fb48b","Type":"ContainerDied","Data":"d1d66e6afd76d88feb8ad14a9a90b9e7ce167c449b3742aef02e4f45a1de08c8"} Dec 08 22:41:38 crc kubenswrapper[4791]: I1208 22:41:38.398063 4791 scope.go:117] "RemoveContainer" containerID="c94dd952fbd13f9f47ab406886e78f89cdc92ef73e8da6c751e274a3aae30a9d" Dec 08 22:41:38 crc kubenswrapper[4791]: I1208 22:41:38.399156 4791 scope.go:117] "RemoveContainer" containerID="d1d66e6afd76d88feb8ad14a9a90b9e7ce167c449b3742aef02e4f45a1de08c8" Dec 08 22:41:38 crc kubenswrapper[4791]: E1208 22:41:38.399649 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:41:43 crc kubenswrapper[4791]: I1208 22:41:43.264170 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-2s4jt/must-gather-rwxfn"] Dec 08 22:41:43 crc kubenswrapper[4791]: E1208 22:41:43.265169 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca48b62d-ffb4-4dfe-bf79-6124995616ec" containerName="registry-server" Dec 08 22:41:43 crc kubenswrapper[4791]: I1208 22:41:43.265187 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca48b62d-ffb4-4dfe-bf79-6124995616ec" containerName="registry-server" Dec 08 22:41:43 crc kubenswrapper[4791]: E1208 22:41:43.265235 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca48b62d-ffb4-4dfe-bf79-6124995616ec" containerName="extract-utilities" Dec 08 22:41:43 crc kubenswrapper[4791]: I1208 22:41:43.265242 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca48b62d-ffb4-4dfe-bf79-6124995616ec" containerName="extract-utilities" Dec 08 22:41:43 crc kubenswrapper[4791]: E1208 22:41:43.265251 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="845fe551-9f2e-4491-bf66-6a9de3450dab" containerName="extract-content" Dec 08 22:41:43 crc kubenswrapper[4791]: I1208 22:41:43.265258 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="845fe551-9f2e-4491-bf66-6a9de3450dab" containerName="extract-content" Dec 08 22:41:43 crc kubenswrapper[4791]: E1208 22:41:43.265270 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca48b62d-ffb4-4dfe-bf79-6124995616ec" containerName="extract-content" Dec 08 22:41:43 crc kubenswrapper[4791]: I1208 22:41:43.265276 4791 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="ca48b62d-ffb4-4dfe-bf79-6124995616ec" containerName="extract-content" Dec 08 22:41:43 crc kubenswrapper[4791]: E1208 22:41:43.265290 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="845fe551-9f2e-4491-bf66-6a9de3450dab" containerName="extract-utilities" Dec 08 22:41:43 crc kubenswrapper[4791]: I1208 22:41:43.265296 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="845fe551-9f2e-4491-bf66-6a9de3450dab" containerName="extract-utilities" Dec 08 22:41:43 crc kubenswrapper[4791]: E1208 22:41:43.265307 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="845fe551-9f2e-4491-bf66-6a9de3450dab" containerName="registry-server" Dec 08 22:41:43 crc kubenswrapper[4791]: I1208 22:41:43.265313 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="845fe551-9f2e-4491-bf66-6a9de3450dab" containerName="registry-server" Dec 08 22:41:43 crc kubenswrapper[4791]: I1208 22:41:43.265545 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca48b62d-ffb4-4dfe-bf79-6124995616ec" containerName="registry-server" Dec 08 22:41:43 crc kubenswrapper[4791]: I1208 22:41:43.265559 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="845fe551-9f2e-4491-bf66-6a9de3450dab" containerName="registry-server" Dec 08 22:41:43 crc kubenswrapper[4791]: I1208 22:41:43.267001 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-2s4jt/must-gather-rwxfn" Dec 08 22:41:43 crc kubenswrapper[4791]: I1208 22:41:43.268959 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-2s4jt"/"default-dockercfg-kqkmx" Dec 08 22:41:43 crc kubenswrapper[4791]: I1208 22:41:43.270032 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-2s4jt"/"kube-root-ca.crt" Dec 08 22:41:43 crc kubenswrapper[4791]: I1208 22:41:43.272787 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-2s4jt"/"openshift-service-ca.crt" Dec 08 22:41:43 crc kubenswrapper[4791]: I1208 22:41:43.290502 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-2s4jt/must-gather-rwxfn"] Dec 08 22:41:43 crc kubenswrapper[4791]: I1208 22:41:43.374415 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k9qc4\" (UniqueName: \"kubernetes.io/projected/c125dfed-a0fd-4181-8461-d66b840ec3e1-kube-api-access-k9qc4\") pod \"must-gather-rwxfn\" (UID: \"c125dfed-a0fd-4181-8461-d66b840ec3e1\") " pod="openshift-must-gather-2s4jt/must-gather-rwxfn" Dec 08 22:41:43 crc kubenswrapper[4791]: I1208 22:41:43.374786 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/c125dfed-a0fd-4181-8461-d66b840ec3e1-must-gather-output\") pod \"must-gather-rwxfn\" (UID: \"c125dfed-a0fd-4181-8461-d66b840ec3e1\") " pod="openshift-must-gather-2s4jt/must-gather-rwxfn" Dec 08 22:41:43 crc kubenswrapper[4791]: I1208 22:41:43.476751 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k9qc4\" (UniqueName: \"kubernetes.io/projected/c125dfed-a0fd-4181-8461-d66b840ec3e1-kube-api-access-k9qc4\") pod \"must-gather-rwxfn\" (UID: \"c125dfed-a0fd-4181-8461-d66b840ec3e1\") " pod="openshift-must-gather-2s4jt/must-gather-rwxfn" Dec 08 22:41:43 crc kubenswrapper[4791]: I1208 22:41:43.476812 4791 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/c125dfed-a0fd-4181-8461-d66b840ec3e1-must-gather-output\") pod \"must-gather-rwxfn\" (UID: \"c125dfed-a0fd-4181-8461-d66b840ec3e1\") " pod="openshift-must-gather-2s4jt/must-gather-rwxfn" Dec 08 22:41:43 crc kubenswrapper[4791]: I1208 22:41:43.477424 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/c125dfed-a0fd-4181-8461-d66b840ec3e1-must-gather-output\") pod \"must-gather-rwxfn\" (UID: \"c125dfed-a0fd-4181-8461-d66b840ec3e1\") " pod="openshift-must-gather-2s4jt/must-gather-rwxfn" Dec 08 22:41:43 crc kubenswrapper[4791]: I1208 22:41:43.496458 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k9qc4\" (UniqueName: \"kubernetes.io/projected/c125dfed-a0fd-4181-8461-d66b840ec3e1-kube-api-access-k9qc4\") pod \"must-gather-rwxfn\" (UID: \"c125dfed-a0fd-4181-8461-d66b840ec3e1\") " pod="openshift-must-gather-2s4jt/must-gather-rwxfn" Dec 08 22:41:43 crc kubenswrapper[4791]: I1208 22:41:43.599534 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-2s4jt"/"default-dockercfg-kqkmx" Dec 08 22:41:43 crc kubenswrapper[4791]: I1208 22:41:43.607871 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-2s4jt/must-gather-rwxfn" Dec 08 22:41:44 crc kubenswrapper[4791]: I1208 22:41:44.221858 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-2s4jt/must-gather-rwxfn"] Dec 08 22:41:44 crc kubenswrapper[4791]: I1208 22:41:44.478669 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-2s4jt/must-gather-rwxfn" event={"ID":"c125dfed-a0fd-4181-8461-d66b840ec3e1","Type":"ContainerStarted","Data":"e0d641074310db2622dce716f3f8a52fee861bd94e7f7c9b0d01465a8722c009"} Dec 08 22:41:45 crc kubenswrapper[4791]: I1208 22:41:45.887916 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 22:41:45 crc kubenswrapper[4791]: I1208 22:41:45.888225 4791 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 22:41:45 crc kubenswrapper[4791]: I1208 22:41:45.889337 4791 scope.go:117] "RemoveContainer" containerID="d1d66e6afd76d88feb8ad14a9a90b9e7ce167c449b3742aef02e4f45a1de08c8" Dec 08 22:41:45 crc kubenswrapper[4791]: E1208 22:41:45.889667 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:41:53 crc kubenswrapper[4791]: I1208 22:41:53.588863 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-2s4jt/must-gather-rwxfn" event={"ID":"c125dfed-a0fd-4181-8461-d66b840ec3e1","Type":"ContainerStarted","Data":"6278133db0f824e5ef87fdcd0582d838099f9aed192c976b01165be57f7d5949"} Dec 08 22:41:53 crc kubenswrapper[4791]: I1208 22:41:53.590380 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-2s4jt/must-gather-rwxfn" 
event={"ID":"c125dfed-a0fd-4181-8461-d66b840ec3e1","Type":"ContainerStarted","Data":"02948ec0830ee3a8edb2319135a2b32db29ed2686aebd5d35f412cb017026712"} Dec 08 22:41:53 crc kubenswrapper[4791]: I1208 22:41:53.616409 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-2s4jt/must-gather-rwxfn" podStartSLOduration=1.972659785 podStartE2EDuration="10.616389451s" podCreationTimestamp="2025-12-08 22:41:43 +0000 UTC" firstStartedPulling="2025-12-08 22:41:44.22434978 +0000 UTC m=+4980.923108125" lastFinishedPulling="2025-12-08 22:41:52.868079456 +0000 UTC m=+4989.566837791" observedRunningTime="2025-12-08 22:41:53.609893528 +0000 UTC m=+4990.308651873" watchObservedRunningTime="2025-12-08 22:41:53.616389451 +0000 UTC m=+4990.315147796" Dec 08 22:41:57 crc kubenswrapper[4791]: I1208 22:41:57.775436 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-2s4jt/crc-debug-ngzgb"] Dec 08 22:41:57 crc kubenswrapper[4791]: I1208 22:41:57.778008 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-2s4jt/crc-debug-ngzgb" Dec 08 22:41:57 crc kubenswrapper[4791]: I1208 22:41:57.934470 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6gbmb\" (UniqueName: \"kubernetes.io/projected/3b6ae4db-90ee-4e03-93f6-b1693f92fe00-kube-api-access-6gbmb\") pod \"crc-debug-ngzgb\" (UID: \"3b6ae4db-90ee-4e03-93f6-b1693f92fe00\") " pod="openshift-must-gather-2s4jt/crc-debug-ngzgb" Dec 08 22:41:57 crc kubenswrapper[4791]: I1208 22:41:57.934623 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3b6ae4db-90ee-4e03-93f6-b1693f92fe00-host\") pod \"crc-debug-ngzgb\" (UID: \"3b6ae4db-90ee-4e03-93f6-b1693f92fe00\") " pod="openshift-must-gather-2s4jt/crc-debug-ngzgb" Dec 08 22:41:58 crc kubenswrapper[4791]: I1208 22:41:58.036491 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3b6ae4db-90ee-4e03-93f6-b1693f92fe00-host\") pod \"crc-debug-ngzgb\" (UID: \"3b6ae4db-90ee-4e03-93f6-b1693f92fe00\") " pod="openshift-must-gather-2s4jt/crc-debug-ngzgb" Dec 08 22:41:58 crc kubenswrapper[4791]: I1208 22:41:58.036663 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3b6ae4db-90ee-4e03-93f6-b1693f92fe00-host\") pod \"crc-debug-ngzgb\" (UID: \"3b6ae4db-90ee-4e03-93f6-b1693f92fe00\") " pod="openshift-must-gather-2s4jt/crc-debug-ngzgb" Dec 08 22:41:58 crc kubenswrapper[4791]: I1208 22:41:58.036734 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6gbmb\" (UniqueName: \"kubernetes.io/projected/3b6ae4db-90ee-4e03-93f6-b1693f92fe00-kube-api-access-6gbmb\") pod \"crc-debug-ngzgb\" (UID: \"3b6ae4db-90ee-4e03-93f6-b1693f92fe00\") " pod="openshift-must-gather-2s4jt/crc-debug-ngzgb" Dec 08 22:41:58 crc kubenswrapper[4791]: I1208 22:41:58.062280 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6gbmb\" (UniqueName: \"kubernetes.io/projected/3b6ae4db-90ee-4e03-93f6-b1693f92fe00-kube-api-access-6gbmb\") pod \"crc-debug-ngzgb\" (UID: \"3b6ae4db-90ee-4e03-93f6-b1693f92fe00\") " pod="openshift-must-gather-2s4jt/crc-debug-ngzgb" Dec 08 22:41:58 crc kubenswrapper[4791]: I1208 22:41:58.097870 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-2s4jt/crc-debug-ngzgb" Dec 08 22:41:58 crc kubenswrapper[4791]: I1208 22:41:58.650606 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-2s4jt/crc-debug-ngzgb" event={"ID":"3b6ae4db-90ee-4e03-93f6-b1693f92fe00","Type":"ContainerStarted","Data":"c75b3bcba755fb59ca5b7249b7679f26a51848420fde6616fc58b1fcbecd83c8"} Dec 08 22:42:00 crc kubenswrapper[4791]: I1208 22:42:00.598520 4791 scope.go:117] "RemoveContainer" containerID="d1d66e6afd76d88feb8ad14a9a90b9e7ce167c449b3742aef02e4f45a1de08c8" Dec 08 22:42:00 crc kubenswrapper[4791]: E1208 22:42:00.599300 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:42:05 crc kubenswrapper[4791]: I1208 22:42:05.251928 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 22:42:05 crc kubenswrapper[4791]: I1208 22:42:05.252425 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 22:42:12 crc kubenswrapper[4791]: I1208 22:42:12.872840 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-2s4jt/crc-debug-ngzgb" event={"ID":"3b6ae4db-90ee-4e03-93f6-b1693f92fe00","Type":"ContainerStarted","Data":"542abad2c92db228f21d672a48b7aba3c52503c862442f8755446351de5707f9"} Dec 08 22:42:12 crc kubenswrapper[4791]: I1208 22:42:12.892426 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-2s4jt/crc-debug-ngzgb" podStartSLOduration=2.323781529 podStartE2EDuration="15.892400068s" podCreationTimestamp="2025-12-08 22:41:57 +0000 UTC" firstStartedPulling="2025-12-08 22:41:58.159936132 +0000 UTC m=+4994.858694477" lastFinishedPulling="2025-12-08 22:42:11.728554661 +0000 UTC m=+5008.427313016" observedRunningTime="2025-12-08 22:42:12.890778028 +0000 UTC m=+5009.589536373" watchObservedRunningTime="2025-12-08 22:42:12.892400068 +0000 UTC m=+5009.591158413" Dec 08 22:42:15 crc kubenswrapper[4791]: I1208 22:42:15.597861 4791 scope.go:117] "RemoveContainer" containerID="d1d66e6afd76d88feb8ad14a9a90b9e7ce167c449b3742aef02e4f45a1de08c8" Dec 08 22:42:15 crc kubenswrapper[4791]: E1208 22:42:15.598593 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:42:27 crc kubenswrapper[4791]: I1208 22:42:27.598844 4791 scope.go:117] "RemoveContainer" 
containerID="d1d66e6afd76d88feb8ad14a9a90b9e7ce167c449b3742aef02e4f45a1de08c8" Dec 08 22:42:27 crc kubenswrapper[4791]: E1208 22:42:27.599778 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:42:35 crc kubenswrapper[4791]: I1208 22:42:35.251238 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 22:42:35 crc kubenswrapper[4791]: I1208 22:42:35.251798 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 22:42:37 crc kubenswrapper[4791]: I1208 22:42:37.659500 4791 generic.go:334] "Generic (PLEG): container finished" podID="3b6ae4db-90ee-4e03-93f6-b1693f92fe00" containerID="542abad2c92db228f21d672a48b7aba3c52503c862442f8755446351de5707f9" exitCode=0 Dec 08 22:42:37 crc kubenswrapper[4791]: I1208 22:42:37.659581 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-2s4jt/crc-debug-ngzgb" event={"ID":"3b6ae4db-90ee-4e03-93f6-b1693f92fe00","Type":"ContainerDied","Data":"542abad2c92db228f21d672a48b7aba3c52503c862442f8755446351de5707f9"} Dec 08 22:42:38 crc kubenswrapper[4791]: I1208 22:42:38.821784 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-2s4jt/crc-debug-ngzgb" Dec 08 22:42:38 crc kubenswrapper[4791]: I1208 22:42:38.857082 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-2s4jt/crc-debug-ngzgb"] Dec 08 22:42:38 crc kubenswrapper[4791]: I1208 22:42:38.866872 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-2s4jt/crc-debug-ngzgb"] Dec 08 22:42:38 crc kubenswrapper[4791]: I1208 22:42:38.929079 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6gbmb\" (UniqueName: \"kubernetes.io/projected/3b6ae4db-90ee-4e03-93f6-b1693f92fe00-kube-api-access-6gbmb\") pod \"3b6ae4db-90ee-4e03-93f6-b1693f92fe00\" (UID: \"3b6ae4db-90ee-4e03-93f6-b1693f92fe00\") " Dec 08 22:42:38 crc kubenswrapper[4791]: I1208 22:42:38.929158 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3b6ae4db-90ee-4e03-93f6-b1693f92fe00-host\") pod \"3b6ae4db-90ee-4e03-93f6-b1693f92fe00\" (UID: \"3b6ae4db-90ee-4e03-93f6-b1693f92fe00\") " Dec 08 22:42:38 crc kubenswrapper[4791]: I1208 22:42:38.929360 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3b6ae4db-90ee-4e03-93f6-b1693f92fe00-host" (OuterVolumeSpecName: "host") pod "3b6ae4db-90ee-4e03-93f6-b1693f92fe00" (UID: "3b6ae4db-90ee-4e03-93f6-b1693f92fe00"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 22:42:38 crc kubenswrapper[4791]: I1208 22:42:38.930003 4791 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3b6ae4db-90ee-4e03-93f6-b1693f92fe00-host\") on node \"crc\" DevicePath \"\"" Dec 08 22:42:38 crc kubenswrapper[4791]: I1208 22:42:38.944448 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b6ae4db-90ee-4e03-93f6-b1693f92fe00-kube-api-access-6gbmb" (OuterVolumeSpecName: "kube-api-access-6gbmb") pod "3b6ae4db-90ee-4e03-93f6-b1693f92fe00" (UID: "3b6ae4db-90ee-4e03-93f6-b1693f92fe00"). InnerVolumeSpecName "kube-api-access-6gbmb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 22:42:39 crc kubenswrapper[4791]: I1208 22:42:39.032949 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6gbmb\" (UniqueName: \"kubernetes.io/projected/3b6ae4db-90ee-4e03-93f6-b1693f92fe00-kube-api-access-6gbmb\") on node \"crc\" DevicePath \"\"" Dec 08 22:42:39 crc kubenswrapper[4791]: I1208 22:42:39.613816 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3b6ae4db-90ee-4e03-93f6-b1693f92fe00" path="/var/lib/kubelet/pods/3b6ae4db-90ee-4e03-93f6-b1693f92fe00/volumes" Dec 08 22:42:39 crc kubenswrapper[4791]: I1208 22:42:39.683627 4791 scope.go:117] "RemoveContainer" containerID="542abad2c92db228f21d672a48b7aba3c52503c862442f8755446351de5707f9" Dec 08 22:42:39 crc kubenswrapper[4791]: I1208 22:42:39.683983 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-2s4jt/crc-debug-ngzgb" Dec 08 22:42:40 crc kubenswrapper[4791]: I1208 22:42:40.050530 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-2s4jt/crc-debug-cvrnt"] Dec 08 22:42:40 crc kubenswrapper[4791]: E1208 22:42:40.051048 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b6ae4db-90ee-4e03-93f6-b1693f92fe00" containerName="container-00" Dec 08 22:42:40 crc kubenswrapper[4791]: I1208 22:42:40.051060 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b6ae4db-90ee-4e03-93f6-b1693f92fe00" containerName="container-00" Dec 08 22:42:40 crc kubenswrapper[4791]: I1208 22:42:40.051409 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b6ae4db-90ee-4e03-93f6-b1693f92fe00" containerName="container-00" Dec 08 22:42:40 crc kubenswrapper[4791]: I1208 22:42:40.052250 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-2s4jt/crc-debug-cvrnt" Dec 08 22:42:40 crc kubenswrapper[4791]: I1208 22:42:40.158030 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sblfb\" (UniqueName: \"kubernetes.io/projected/5040198a-3d23-4a01-a43e-d334617757bf-kube-api-access-sblfb\") pod \"crc-debug-cvrnt\" (UID: \"5040198a-3d23-4a01-a43e-d334617757bf\") " pod="openshift-must-gather-2s4jt/crc-debug-cvrnt" Dec 08 22:42:40 crc kubenswrapper[4791]: I1208 22:42:40.158241 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5040198a-3d23-4a01-a43e-d334617757bf-host\") pod \"crc-debug-cvrnt\" (UID: \"5040198a-3d23-4a01-a43e-d334617757bf\") " pod="openshift-must-gather-2s4jt/crc-debug-cvrnt" Dec 08 22:42:40 crc kubenswrapper[4791]: I1208 22:42:40.260549 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sblfb\" (UniqueName: \"kubernetes.io/projected/5040198a-3d23-4a01-a43e-d334617757bf-kube-api-access-sblfb\") pod \"crc-debug-cvrnt\" (UID: \"5040198a-3d23-4a01-a43e-d334617757bf\") " pod="openshift-must-gather-2s4jt/crc-debug-cvrnt" Dec 08 22:42:40 crc kubenswrapper[4791]: I1208 22:42:40.260683 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5040198a-3d23-4a01-a43e-d334617757bf-host\") pod \"crc-debug-cvrnt\" (UID: \"5040198a-3d23-4a01-a43e-d334617757bf\") " pod="openshift-must-gather-2s4jt/crc-debug-cvrnt" Dec 08 22:42:40 crc kubenswrapper[4791]: I1208 22:42:40.260793 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5040198a-3d23-4a01-a43e-d334617757bf-host\") pod \"crc-debug-cvrnt\" (UID: \"5040198a-3d23-4a01-a43e-d334617757bf\") " pod="openshift-must-gather-2s4jt/crc-debug-cvrnt" Dec 08 22:42:40 crc kubenswrapper[4791]: I1208 22:42:40.279047 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sblfb\" (UniqueName: \"kubernetes.io/projected/5040198a-3d23-4a01-a43e-d334617757bf-kube-api-access-sblfb\") pod \"crc-debug-cvrnt\" (UID: \"5040198a-3d23-4a01-a43e-d334617757bf\") " pod="openshift-must-gather-2s4jt/crc-debug-cvrnt" Dec 08 22:42:40 crc kubenswrapper[4791]: I1208 22:42:40.369996 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-2s4jt/crc-debug-cvrnt" Dec 08 22:42:40 crc kubenswrapper[4791]: W1208 22:42:40.422592 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5040198a_3d23_4a01_a43e_d334617757bf.slice/crio-2a73941fd5ef0a712ddc6b95b939a80d823e2160e8da8fded0ada6f7cbcaa5f0 WatchSource:0}: Error finding container 2a73941fd5ef0a712ddc6b95b939a80d823e2160e8da8fded0ada6f7cbcaa5f0: Status 404 returned error can't find the container with id 2a73941fd5ef0a712ddc6b95b939a80d823e2160e8da8fded0ada6f7cbcaa5f0 Dec 08 22:42:40 crc kubenswrapper[4791]: I1208 22:42:40.695204 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-2s4jt/crc-debug-cvrnt" event={"ID":"5040198a-3d23-4a01-a43e-d334617757bf","Type":"ContainerStarted","Data":"2a73941fd5ef0a712ddc6b95b939a80d823e2160e8da8fded0ada6f7cbcaa5f0"} Dec 08 22:42:41 crc kubenswrapper[4791]: I1208 22:42:41.597892 4791 scope.go:117] "RemoveContainer" containerID="d1d66e6afd76d88feb8ad14a9a90b9e7ce167c449b3742aef02e4f45a1de08c8" Dec 08 22:42:41 crc kubenswrapper[4791]: E1208 22:42:41.598858 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:42:41 crc kubenswrapper[4791]: I1208 22:42:41.708534 4791 generic.go:334] "Generic (PLEG): container finished" podID="5040198a-3d23-4a01-a43e-d334617757bf" containerID="7a4192b4a0c468df8d3f5f07b063749fb4e97db69973eba729872e1faad4be66" exitCode=1 Dec 08 22:42:41 crc kubenswrapper[4791]: I1208 22:42:41.708584 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-2s4jt/crc-debug-cvrnt" event={"ID":"5040198a-3d23-4a01-a43e-d334617757bf","Type":"ContainerDied","Data":"7a4192b4a0c468df8d3f5f07b063749fb4e97db69973eba729872e1faad4be66"} Dec 08 22:42:41 crc kubenswrapper[4791]: I1208 22:42:41.752833 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-2s4jt/crc-debug-cvrnt"] Dec 08 22:42:41 crc kubenswrapper[4791]: I1208 22:42:41.765332 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-2s4jt/crc-debug-cvrnt"] Dec 08 22:42:42 crc kubenswrapper[4791]: I1208 22:42:42.852624 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-2s4jt/crc-debug-cvrnt" Dec 08 22:42:42 crc kubenswrapper[4791]: I1208 22:42:42.927099 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sblfb\" (UniqueName: \"kubernetes.io/projected/5040198a-3d23-4a01-a43e-d334617757bf-kube-api-access-sblfb\") pod \"5040198a-3d23-4a01-a43e-d334617757bf\" (UID: \"5040198a-3d23-4a01-a43e-d334617757bf\") " Dec 08 22:42:42 crc kubenswrapper[4791]: I1208 22:42:42.927215 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5040198a-3d23-4a01-a43e-d334617757bf-host\") pod \"5040198a-3d23-4a01-a43e-d334617757bf\" (UID: \"5040198a-3d23-4a01-a43e-d334617757bf\") " Dec 08 22:42:42 crc kubenswrapper[4791]: I1208 22:42:42.927318 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5040198a-3d23-4a01-a43e-d334617757bf-host" (OuterVolumeSpecName: "host") pod "5040198a-3d23-4a01-a43e-d334617757bf" (UID: "5040198a-3d23-4a01-a43e-d334617757bf"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 22:42:42 crc kubenswrapper[4791]: I1208 22:42:42.927987 4791 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5040198a-3d23-4a01-a43e-d334617757bf-host\") on node \"crc\" DevicePath \"\"" Dec 08 22:42:42 crc kubenswrapper[4791]: I1208 22:42:42.939467 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5040198a-3d23-4a01-a43e-d334617757bf-kube-api-access-sblfb" (OuterVolumeSpecName: "kube-api-access-sblfb") pod "5040198a-3d23-4a01-a43e-d334617757bf" (UID: "5040198a-3d23-4a01-a43e-d334617757bf"). InnerVolumeSpecName "kube-api-access-sblfb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 22:42:43 crc kubenswrapper[4791]: I1208 22:42:43.030904 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sblfb\" (UniqueName: \"kubernetes.io/projected/5040198a-3d23-4a01-a43e-d334617757bf-kube-api-access-sblfb\") on node \"crc\" DevicePath \"\"" Dec 08 22:42:43 crc kubenswrapper[4791]: I1208 22:42:43.614033 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5040198a-3d23-4a01-a43e-d334617757bf" path="/var/lib/kubelet/pods/5040198a-3d23-4a01-a43e-d334617757bf/volumes" Dec 08 22:42:43 crc kubenswrapper[4791]: I1208 22:42:43.745828 4791 scope.go:117] "RemoveContainer" containerID="7a4192b4a0c468df8d3f5f07b063749fb4e97db69973eba729872e1faad4be66" Dec 08 22:42:43 crc kubenswrapper[4791]: I1208 22:42:43.745857 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-2s4jt/crc-debug-cvrnt" Dec 08 22:42:48 crc kubenswrapper[4791]: E1208 22:42:48.134732 4791 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5040198a_3d23_4a01_a43e_d334617757bf.slice/crio-2a73941fd5ef0a712ddc6b95b939a80d823e2160e8da8fded0ada6f7cbcaa5f0\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5040198a_3d23_4a01_a43e_d334617757bf.slice\": RecentStats: unable to find data in memory cache]" Dec 08 22:42:48 crc kubenswrapper[4791]: E1208 22:42:48.138107 4791 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5040198a_3d23_4a01_a43e_d334617757bf.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5040198a_3d23_4a01_a43e_d334617757bf.slice/crio-2a73941fd5ef0a712ddc6b95b939a80d823e2160e8da8fded0ada6f7cbcaa5f0\": RecentStats: unable to find data in memory cache]" Dec 08 22:42:50 crc kubenswrapper[4791]: E1208 22:42:50.251450 4791 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5040198a_3d23_4a01_a43e_d334617757bf.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5040198a_3d23_4a01_a43e_d334617757bf.slice/crio-2a73941fd5ef0a712ddc6b95b939a80d823e2160e8da8fded0ada6f7cbcaa5f0\": RecentStats: unable to find data in memory cache]" Dec 08 22:42:53 crc kubenswrapper[4791]: E1208 22:42:53.427107 4791 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5040198a_3d23_4a01_a43e_d334617757bf.slice/crio-2a73941fd5ef0a712ddc6b95b939a80d823e2160e8da8fded0ada6f7cbcaa5f0\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5040198a_3d23_4a01_a43e_d334617757bf.slice\": RecentStats: unable to find data in memory cache]" Dec 08 22:42:53 crc kubenswrapper[4791]: I1208 22:42:53.605416 4791 scope.go:117] "RemoveContainer" containerID="d1d66e6afd76d88feb8ad14a9a90b9e7ce167c449b3742aef02e4f45a1de08c8" Dec 08 22:42:53 crc kubenswrapper[4791]: E1208 22:42:53.605988 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:43:03 crc kubenswrapper[4791]: E1208 22:43:03.480643 4791 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5040198a_3d23_4a01_a43e_d334617757bf.slice\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5040198a_3d23_4a01_a43e_d334617757bf.slice/crio-2a73941fd5ef0a712ddc6b95b939a80d823e2160e8da8fded0ada6f7cbcaa5f0\": RecentStats: unable to find data in memory cache]" Dec 08 22:43:05 crc kubenswrapper[4791]: E1208 22:43:05.068361 4791 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5040198a_3d23_4a01_a43e_d334617757bf.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5040198a_3d23_4a01_a43e_d334617757bf.slice/crio-2a73941fd5ef0a712ddc6b95b939a80d823e2160e8da8fded0ada6f7cbcaa5f0\": RecentStats: unable to find data in memory cache]" Dec 08 22:43:05 crc kubenswrapper[4791]: I1208 22:43:05.251295 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 22:43:05 crc kubenswrapper[4791]: I1208 22:43:05.251361 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 22:43:05 crc kubenswrapper[4791]: I1208 22:43:05.251416 4791 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" Dec 08 22:43:05 crc kubenswrapper[4791]: I1208 22:43:05.252364 4791 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"70973cf87c58a479cd2e1f6477c1a3ec7af9883a93b55c6f4884d75fee584abd"} pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 08 22:43:05 crc kubenswrapper[4791]: I1208 22:43:05.252439 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" containerID="cri-o://70973cf87c58a479cd2e1f6477c1a3ec7af9883a93b55c6f4884d75fee584abd" gracePeriod=600 Dec 08 22:43:05 crc kubenswrapper[4791]: E1208 22:43:05.392560 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:43:05 crc kubenswrapper[4791]: I1208 22:43:05.597751 4791 scope.go:117] "RemoveContainer" containerID="d1d66e6afd76d88feb8ad14a9a90b9e7ce167c449b3742aef02e4f45a1de08c8" Dec 08 22:43:05 crc kubenswrapper[4791]: E1208 22:43:05.598034 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager 
pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:43:05 crc kubenswrapper[4791]: I1208 22:43:05.975942 4791 generic.go:334] "Generic (PLEG): container finished" podID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerID="70973cf87c58a479cd2e1f6477c1a3ec7af9883a93b55c6f4884d75fee584abd" exitCode=0 Dec 08 22:43:05 crc kubenswrapper[4791]: I1208 22:43:05.975996 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" event={"ID":"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d","Type":"ContainerDied","Data":"70973cf87c58a479cd2e1f6477c1a3ec7af9883a93b55c6f4884d75fee584abd"} Dec 08 22:43:05 crc kubenswrapper[4791]: I1208 22:43:05.976051 4791 scope.go:117] "RemoveContainer" containerID="3014bf225e3c79e5f5e2055994e09b528c4a4f223d744e19e904fa5ce5728012" Dec 08 22:43:05 crc kubenswrapper[4791]: I1208 22:43:05.977212 4791 scope.go:117] "RemoveContainer" containerID="70973cf87c58a479cd2e1f6477c1a3ec7af9883a93b55c6f4884d75fee584abd" Dec 08 22:43:05 crc kubenswrapper[4791]: E1208 22:43:05.978012 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:43:11 crc kubenswrapper[4791]: I1208 22:43:11.776642 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-56d9b646cd-6b4tm_46915f99-16c5-4219-83db-3565edb2ea87/barbican-api/0.log" Dec 08 22:43:11 crc kubenswrapper[4791]: I1208 22:43:11.903523 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-56d9b646cd-6b4tm_46915f99-16c5-4219-83db-3565edb2ea87/barbican-api-log/0.log" Dec 08 22:43:11 crc kubenswrapper[4791]: I1208 22:43:11.979424 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-655599f54b-bjpp6_85e818fb-c004-4699-86d4-d06e8216ddd3/barbican-keystone-listener/0.log" Dec 08 22:43:12 crc kubenswrapper[4791]: I1208 22:43:12.005111 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-655599f54b-bjpp6_85e818fb-c004-4699-86d4-d06e8216ddd3/barbican-keystone-listener-log/0.log" Dec 08 22:43:12 crc kubenswrapper[4791]: I1208 22:43:12.157438 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-765b6fcd95-7rp4s_ae2c49a8-2981-4fa3-a18a-afa91303d23f/barbican-worker/0.log" Dec 08 22:43:12 crc kubenswrapper[4791]: I1208 22:43:12.164839 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-765b6fcd95-7rp4s_ae2c49a8-2981-4fa3-a18a-afa91303d23f/barbican-worker-log/0.log" Dec 08 22:43:12 crc kubenswrapper[4791]: I1208 22:43:12.386184 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_41ccdd19-b7c2-4647-92a2-1b3396777cb7/cinder-api-log/0.log" Dec 08 22:43:12 crc kubenswrapper[4791]: I1208 22:43:12.410626 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_41ccdd19-b7c2-4647-92a2-1b3396777cb7/cinder-api/0.log" Dec 08 22:43:12 crc 
kubenswrapper[4791]: I1208 22:43:12.519325 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_c020e24e-53b8-4042-a8b6-18cf852464a3/cinder-scheduler/0.log" Dec 08 22:43:12 crc kubenswrapper[4791]: I1208 22:43:12.626873 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_c020e24e-53b8-4042-a8b6-18cf852464a3/probe/0.log" Dec 08 22:43:12 crc kubenswrapper[4791]: I1208 22:43:12.678310 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-f84f9ccf-tdxxg_974e6ad2-a5d0-4a35-b88a-a72fea48b754/init/0.log" Dec 08 22:43:12 crc kubenswrapper[4791]: I1208 22:43:12.870933 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-f84f9ccf-tdxxg_974e6ad2-a5d0-4a35-b88a-a72fea48b754/dnsmasq-dns/0.log" Dec 08 22:43:12 crc kubenswrapper[4791]: I1208 22:43:12.890964 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-f84f9ccf-tdxxg_974e6ad2-a5d0-4a35-b88a-a72fea48b754/init/0.log" Dec 08 22:43:12 crc kubenswrapper[4791]: I1208 22:43:12.940171 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_2afefbde-0eb6-4887-94b7-c018e79f1ddb/glance-httpd/0.log" Dec 08 22:43:13 crc kubenswrapper[4791]: I1208 22:43:13.085886 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_2afefbde-0eb6-4887-94b7-c018e79f1ddb/glance-log/0.log" Dec 08 22:43:13 crc kubenswrapper[4791]: I1208 22:43:13.144616 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_f9c41bbf-581f-4055-8855-6775f65b2409/glance-httpd/0.log" Dec 08 22:43:13 crc kubenswrapper[4791]: I1208 22:43:13.190151 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_f9c41bbf-581f-4055-8855-6775f65b2409/glance-log/0.log" Dec 08 22:43:13 crc kubenswrapper[4791]: I1208 22:43:13.724667 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-engine-d44cc5586-cqs7v_c5036ceb-802c-446d-ac98-a56f732e25d9/heat-engine/0.log" Dec 08 22:43:13 crc kubenswrapper[4791]: E1208 22:43:13.754754 4791 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5040198a_3d23_4a01_a43e_d334617757bf.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5040198a_3d23_4a01_a43e_d334617757bf.slice/crio-2a73941fd5ef0a712ddc6b95b939a80d823e2160e8da8fded0ada6f7cbcaa5f0\": RecentStats: unable to find data in memory cache]" Dec 08 22:43:13 crc kubenswrapper[4791]: I1208 22:43:13.961045 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-api-59878569f5-swkzv_ad72828f-fcf7-494f-8ccf-384cde0ef6c9/heat-api/0.log" Dec 08 22:43:14 crc kubenswrapper[4791]: I1208 22:43:14.000618 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-cfnapi-675d76c787-jzkrg_650a3079-dd09-461d-b647-bae2adac5ee6/heat-cfnapi/0.log" Dec 08 22:43:14 crc kubenswrapper[4791]: I1208 22:43:14.067787 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-5fd957fc96-br4ld_abde564f-96c1-47f0-ab05-c8e54905668b/keystone-api/0.log" Dec 08 22:43:14 crc kubenswrapper[4791]: I1208 22:43:14.265006 4791 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_keystone-cron-29420521-54kx5_0d3bd890-27a0-476f-85fb-55fbdb17e6a4/keystone-cron/0.log" Dec 08 22:43:14 crc kubenswrapper[4791]: I1208 22:43:14.580657 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-947b647f-l42kf_9e54b9bb-f636-476f-8285-01dc712110d5/neutron-httpd/0.log" Dec 08 22:43:14 crc kubenswrapper[4791]: I1208 22:43:14.665655 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-947b647f-l42kf_9e54b9bb-f636-476f-8285-01dc712110d5/neutron-api/0.log" Dec 08 22:43:15 crc kubenswrapper[4791]: I1208 22:43:15.204256 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_f6371760-9057-4d77-9e38-3a8523adb28f/nova-api-log/0.log" Dec 08 22:43:15 crc kubenswrapper[4791]: I1208 22:43:15.488443 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_f6371760-9057-4d77-9e38-3a8523adb28f/nova-api-api/0.log" Dec 08 22:43:15 crc kubenswrapper[4791]: I1208 22:43:15.586039 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_1d854578-6157-4d97-879e-81bcb802f28d/nova-cell0-conductor-conductor/0.log" Dec 08 22:43:16 crc kubenswrapper[4791]: I1208 22:43:16.224908 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_11583b6c-2a5f-4a4d-b4ec-b59d3dc32f5d/nova-cell1-conductor-conductor/0.log" Dec 08 22:43:16 crc kubenswrapper[4791]: I1208 22:43:16.482307 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_8b6dd596-2078-462a-9c8c-5694cf4e1f9d/nova-cell1-novncproxy-novncproxy/0.log" Dec 08 22:43:16 crc kubenswrapper[4791]: I1208 22:43:16.554032 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_e85819c2-ae52-42b3-89d5-426970706586/nova-metadata-log/0.log" Dec 08 22:43:16 crc kubenswrapper[4791]: I1208 22:43:16.955565 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_cafa4036-e745-4f13-abd0-06e498ef4bcc/nova-scheduler-scheduler/0.log" Dec 08 22:43:16 crc kubenswrapper[4791]: I1208 22:43:16.997585 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_8e46f211-8213-4219-9389-044888a87181/mysql-bootstrap/0.log" Dec 08 22:43:17 crc kubenswrapper[4791]: I1208 22:43:17.259200 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_8e46f211-8213-4219-9389-044888a87181/mysql-bootstrap/0.log" Dec 08 22:43:17 crc kubenswrapper[4791]: I1208 22:43:17.416624 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_8e46f211-8213-4219-9389-044888a87181/galera/0.log" Dec 08 22:43:17 crc kubenswrapper[4791]: I1208 22:43:17.475949 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_0eeeba8c-9e5c-4701-9941-8a324604a18b/mysql-bootstrap/0.log" Dec 08 22:43:17 crc kubenswrapper[4791]: I1208 22:43:17.601563 4791 scope.go:117] "RemoveContainer" containerID="d1d66e6afd76d88feb8ad14a9a90b9e7ce167c449b3742aef02e4f45a1de08c8" Dec 08 22:43:17 crc kubenswrapper[4791]: E1208 22:43:17.602524 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" 
pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:43:17 crc kubenswrapper[4791]: I1208 22:43:17.853633 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_0eeeba8c-9e5c-4701-9941-8a324604a18b/galera/0.log" Dec 08 22:43:17 crc kubenswrapper[4791]: I1208 22:43:17.863747 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_0eeeba8c-9e5c-4701-9941-8a324604a18b/mysql-bootstrap/0.log" Dec 08 22:43:18 crc kubenswrapper[4791]: I1208 22:43:18.088473 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_45a88144-9574-4095-9e4b-3cc3bc138670/openstackclient/0.log" Dec 08 22:43:18 crc kubenswrapper[4791]: I1208 22:43:18.120684 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-g2zr4_0ab866e7-9292-4d1c-b55e-6d29c9d23b05/ovn-controller/0.log" Dec 08 22:43:18 crc kubenswrapper[4791]: I1208 22:43:18.357113 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_e85819c2-ae52-42b3-89d5-426970706586/nova-metadata-metadata/0.log" Dec 08 22:43:18 crc kubenswrapper[4791]: I1208 22:43:18.388370 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-85s7k_eb1376f4-3d74-4175-8635-500b29e89984/openstack-network-exporter/0.log" Dec 08 22:43:18 crc kubenswrapper[4791]: I1208 22:43:18.565097 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-9b4hh_d9b8addc-5f49-43c0-a4c3-23ed14252765/ovsdb-server-init/0.log" Dec 08 22:43:18 crc kubenswrapper[4791]: I1208 22:43:18.815818 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-9b4hh_d9b8addc-5f49-43c0-a4c3-23ed14252765/ovsdb-server-init/0.log" Dec 08 22:43:18 crc kubenswrapper[4791]: I1208 22:43:18.826044 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-9b4hh_d9b8addc-5f49-43c0-a4c3-23ed14252765/ovs-vswitchd/0.log" Dec 08 22:43:19 crc kubenswrapper[4791]: I1208 22:43:19.046518 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-9b4hh_d9b8addc-5f49-43c0-a4c3-23ed14252765/ovsdb-server/0.log" Dec 08 22:43:19 crc kubenswrapper[4791]: I1208 22:43:19.081659 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_e2977e67-f037-4524-bfb5-0b04940113f7/openstack-network-exporter/0.log" Dec 08 22:43:19 crc kubenswrapper[4791]: I1208 22:43:19.155439 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_e2977e67-f037-4524-bfb5-0b04940113f7/ovn-northd/0.log" Dec 08 22:43:19 crc kubenswrapper[4791]: I1208 22:43:19.256462 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_e94275f5-3fd4-409e-9496-431d35e9b1a5/openstack-network-exporter/0.log" Dec 08 22:43:19 crc kubenswrapper[4791]: I1208 22:43:19.425234 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_e94275f5-3fd4-409e-9496-431d35e9b1a5/ovsdbserver-nb/0.log" Dec 08 22:43:19 crc kubenswrapper[4791]: I1208 22:43:19.539994 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_cfef56eb-b1e1-48cc-9b1a-c92587748a8d/openstack-network-exporter/0.log" Dec 08 22:43:19 crc kubenswrapper[4791]: I1208 22:43:19.876999 4791 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovsdbserver-sb-0_cfef56eb-b1e1-48cc-9b1a-c92587748a8d/ovsdbserver-sb/0.log" Dec 08 22:43:19 crc kubenswrapper[4791]: I1208 22:43:19.997988 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-76fcb88b6d-mffmn_9458287a-1c73-47c0-8a35-a3b14ed39fab/placement-api/0.log" Dec 08 22:43:20 crc kubenswrapper[4791]: I1208 22:43:20.086191 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-76fcb88b6d-mffmn_9458287a-1c73-47c0-8a35-a3b14ed39fab/placement-log/0.log" Dec 08 22:43:20 crc kubenswrapper[4791]: I1208 22:43:20.148994 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_d9cd6ba2-6502-43cf-8e48-36570ea8e831/setup-container/0.log" Dec 08 22:43:20 crc kubenswrapper[4791]: E1208 22:43:20.292066 4791 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5040198a_3d23_4a01_a43e_d334617757bf.slice/crio-2a73941fd5ef0a712ddc6b95b939a80d823e2160e8da8fded0ada6f7cbcaa5f0\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5040198a_3d23_4a01_a43e_d334617757bf.slice\": RecentStats: unable to find data in memory cache]" Dec 08 22:43:20 crc kubenswrapper[4791]: I1208 22:43:20.447790 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_d9cd6ba2-6502-43cf-8e48-36570ea8e831/setup-container/0.log" Dec 08 22:43:20 crc kubenswrapper[4791]: I1208 22:43:20.458793 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9/setup-container/0.log" Dec 08 22:43:20 crc kubenswrapper[4791]: I1208 22:43:20.598214 4791 scope.go:117] "RemoveContainer" containerID="70973cf87c58a479cd2e1f6477c1a3ec7af9883a93b55c6f4884d75fee584abd" Dec 08 22:43:20 crc kubenswrapper[4791]: E1208 22:43:20.598848 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:43:20 crc kubenswrapper[4791]: I1208 22:43:20.610072 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_d9cd6ba2-6502-43cf-8e48-36570ea8e831/rabbitmq/0.log" Dec 08 22:43:20 crc kubenswrapper[4791]: I1208 22:43:20.773467 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9/setup-container/0.log" Dec 08 22:43:20 crc kubenswrapper[4791]: I1208 22:43:20.842460 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_45efc0ef-cf29-470e-bfd3-7dd3ce7b85d9/rabbitmq/0.log" Dec 08 22:43:20 crc kubenswrapper[4791]: I1208 22:43:20.903414 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-1_6101a045-4b01-484e-a65b-4c406e458ea1/setup-container/0.log" Dec 08 22:43:21 crc kubenswrapper[4791]: I1208 22:43:21.169258 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-1_6101a045-4b01-484e-a65b-4c406e458ea1/rabbitmq/0.log" Dec 08 22:43:21 crc kubenswrapper[4791]: 
I1208 22:43:21.187651 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-1_6101a045-4b01-484e-a65b-4c406e458ea1/setup-container/0.log" Dec 08 22:43:21 crc kubenswrapper[4791]: I1208 22:43:21.213379 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-2_87530e07-a720-4b5f-bd6f-c3f8bb540453/setup-container/0.log" Dec 08 22:43:21 crc kubenswrapper[4791]: I1208 22:43:21.425324 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-2_87530e07-a720-4b5f-bd6f-c3f8bb540453/setup-container/0.log" Dec 08 22:43:21 crc kubenswrapper[4791]: I1208 22:43:21.548983 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-2_87530e07-a720-4b5f-bd6f-c3f8bb540453/rabbitmq/0.log" Dec 08 22:43:21 crc kubenswrapper[4791]: I1208 22:43:21.591573 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-8dcddd8f7-nk4tp_1a347204-ba19-40d2-8afa-48549be35c18/proxy-httpd/0.log" Dec 08 22:43:21 crc kubenswrapper[4791]: I1208 22:43:21.718916 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-8dcddd8f7-nk4tp_1a347204-ba19-40d2-8afa-48549be35c18/proxy-server/0.log" Dec 08 22:43:21 crc kubenswrapper[4791]: I1208 22:43:21.797202 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-249fw_5a5de61a-f218-4e36-afaf-2cab04468093/swift-ring-rebalance/0.log" Dec 08 22:43:21 crc kubenswrapper[4791]: I1208 22:43:21.983649 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_7f7de8af-60f4-4571-bc47-95cb97ce0121/account-auditor/0.log" Dec 08 22:43:22 crc kubenswrapper[4791]: I1208 22:43:22.022655 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_d1c662fa-6e9f-4127-af2f-059adea86bd4/memcached/0.log" Dec 08 22:43:22 crc kubenswrapper[4791]: I1208 22:43:22.043561 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_7f7de8af-60f4-4571-bc47-95cb97ce0121/account-replicator/0.log" Dec 08 22:43:22 crc kubenswrapper[4791]: I1208 22:43:22.055467 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_7f7de8af-60f4-4571-bc47-95cb97ce0121/account-reaper/0.log" Dec 08 22:43:22 crc kubenswrapper[4791]: I1208 22:43:22.154822 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_7f7de8af-60f4-4571-bc47-95cb97ce0121/account-server/0.log" Dec 08 22:43:22 crc kubenswrapper[4791]: I1208 22:43:22.222069 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_7f7de8af-60f4-4571-bc47-95cb97ce0121/container-updater/0.log" Dec 08 22:43:22 crc kubenswrapper[4791]: I1208 22:43:22.238921 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_7f7de8af-60f4-4571-bc47-95cb97ce0121/container-auditor/0.log" Dec 08 22:43:22 crc kubenswrapper[4791]: I1208 22:43:22.261934 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_7f7de8af-60f4-4571-bc47-95cb97ce0121/container-replicator/0.log" Dec 08 22:43:22 crc kubenswrapper[4791]: I1208 22:43:22.279462 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_7f7de8af-60f4-4571-bc47-95cb97ce0121/container-server/0.log" Dec 08 22:43:22 crc kubenswrapper[4791]: I1208 22:43:22.391337 4791 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_7f7de8af-60f4-4571-bc47-95cb97ce0121/object-auditor/0.log" Dec 08 22:43:22 crc kubenswrapper[4791]: I1208 22:43:22.443957 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_7f7de8af-60f4-4571-bc47-95cb97ce0121/object-expirer/0.log" Dec 08 22:43:22 crc kubenswrapper[4791]: I1208 22:43:22.469044 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_7f7de8af-60f4-4571-bc47-95cb97ce0121/object-updater/0.log" Dec 08 22:43:22 crc kubenswrapper[4791]: I1208 22:43:22.474918 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_7f7de8af-60f4-4571-bc47-95cb97ce0121/object-server/0.log" Dec 08 22:43:22 crc kubenswrapper[4791]: I1208 22:43:22.478820 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_7f7de8af-60f4-4571-bc47-95cb97ce0121/object-replicator/0.log" Dec 08 22:43:22 crc kubenswrapper[4791]: I1208 22:43:22.597317 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_7f7de8af-60f4-4571-bc47-95cb97ce0121/rsync/0.log" Dec 08 22:43:22 crc kubenswrapper[4791]: I1208 22:43:22.636638 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_7f7de8af-60f4-4571-bc47-95cb97ce0121/swift-recon-cron/0.log" Dec 08 22:43:23 crc kubenswrapper[4791]: E1208 22:43:23.800046 4791 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5040198a_3d23_4a01_a43e_d334617757bf.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5040198a_3d23_4a01_a43e_d334617757bf.slice/crio-2a73941fd5ef0a712ddc6b95b939a80d823e2160e8da8fded0ada6f7cbcaa5f0\": RecentStats: unable to find data in memory cache]" Dec 08 22:43:29 crc kubenswrapper[4791]: I1208 22:43:29.598490 4791 scope.go:117] "RemoveContainer" containerID="d1d66e6afd76d88feb8ad14a9a90b9e7ce167c449b3742aef02e4f45a1de08c8" Dec 08 22:43:29 crc kubenswrapper[4791]: E1208 22:43:29.599302 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:43:33 crc kubenswrapper[4791]: I1208 22:43:33.607456 4791 scope.go:117] "RemoveContainer" containerID="70973cf87c58a479cd2e1f6477c1a3ec7af9883a93b55c6f4884d75fee584abd" Dec 08 22:43:33 crc kubenswrapper[4791]: E1208 22:43:33.608423 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:43:34 crc kubenswrapper[4791]: E1208 22:43:34.114040 4791 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5040198a_3d23_4a01_a43e_d334617757bf.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5040198a_3d23_4a01_a43e_d334617757bf.slice/crio-2a73941fd5ef0a712ddc6b95b939a80d823e2160e8da8fded0ada6f7cbcaa5f0\": RecentStats: unable to find data in memory cache]" Dec 08 22:43:35 crc kubenswrapper[4791]: E1208 22:43:35.069041 4791 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5040198a_3d23_4a01_a43e_d334617757bf.slice/crio-2a73941fd5ef0a712ddc6b95b939a80d823e2160e8da8fded0ada6f7cbcaa5f0\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5040198a_3d23_4a01_a43e_d334617757bf.slice\": RecentStats: unable to find data in memory cache]" Dec 08 22:43:43 crc kubenswrapper[4791]: E1208 22:43:43.639191 4791 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: could not stat "/var/lib/containers/storage/overlay/298ad37d1cfc04784dc0bc6ea15534b6409d28ef334cd4245c435ffd5a7a8f08/diff" to get inode usage: stat /var/lib/containers/storage/overlay/298ad37d1cfc04784dc0bc6ea15534b6409d28ef334cd4245c435ffd5a7a8f08/diff: no such file or directory, extraDiskErr: Dec 08 22:43:44 crc kubenswrapper[4791]: I1208 22:43:44.597951 4791 scope.go:117] "RemoveContainer" containerID="70973cf87c58a479cd2e1f6477c1a3ec7af9883a93b55c6f4884d75fee584abd" Dec 08 22:43:44 crc kubenswrapper[4791]: I1208 22:43:44.598292 4791 scope.go:117] "RemoveContainer" containerID="d1d66e6afd76d88feb8ad14a9a90b9e7ce167c449b3742aef02e4f45a1de08c8" Dec 08 22:43:44 crc kubenswrapper[4791]: E1208 22:43:44.598439 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:43:44 crc kubenswrapper[4791]: E1208 22:43:44.598548 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:43:48 crc kubenswrapper[4791]: I1208 22:43:48.657635 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009bwv2wx_d6b28097-7d9a-453d-9f3e-25998fbd0181/util/0.log" Dec 08 22:43:48 crc kubenswrapper[4791]: I1208 22:43:48.847140 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009bwv2wx_d6b28097-7d9a-453d-9f3e-25998fbd0181/pull/0.log" Dec 08 22:43:48 crc kubenswrapper[4791]: I1208 22:43:48.862345 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009bwv2wx_d6b28097-7d9a-453d-9f3e-25998fbd0181/pull/0.log" Dec 08 22:43:48 crc 
kubenswrapper[4791]: I1208 22:43:48.866462 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009bwv2wx_d6b28097-7d9a-453d-9f3e-25998fbd0181/util/0.log" Dec 08 22:43:49 crc kubenswrapper[4791]: I1208 22:43:49.080263 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009bwv2wx_d6b28097-7d9a-453d-9f3e-25998fbd0181/extract/0.log" Dec 08 22:43:49 crc kubenswrapper[4791]: I1208 22:43:49.087316 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009bwv2wx_d6b28097-7d9a-453d-9f3e-25998fbd0181/pull/0.log" Dec 08 22:43:49 crc kubenswrapper[4791]: I1208 22:43:49.104738 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009bwv2wx_d6b28097-7d9a-453d-9f3e-25998fbd0181/util/0.log" Dec 08 22:43:49 crc kubenswrapper[4791]: I1208 22:43:49.299112 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-9dcqh_c3e8c89c-91da-44c4-95ec-20b5d543eca1/kube-rbac-proxy/0.log" Dec 08 22:43:49 crc kubenswrapper[4791]: I1208 22:43:49.345722 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-6c677c69b-4d6px_684d1010-fc58-4789-b8f6-ebe783ec15fe/kube-rbac-proxy/0.log" Dec 08 22:43:49 crc kubenswrapper[4791]: I1208 22:43:49.366427 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-9dcqh_c3e8c89c-91da-44c4-95ec-20b5d543eca1/manager/0.log" Dec 08 22:43:49 crc kubenswrapper[4791]: I1208 22:43:49.569020 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-6c677c69b-4d6px_684d1010-fc58-4789-b8f6-ebe783ec15fe/manager/0.log" Dec 08 22:43:49 crc kubenswrapper[4791]: I1208 22:43:49.576522 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-697fb699cf-zn5rk_45ebd174-c21e-4fb5-ae01-cf6b3d5e7079/kube-rbac-proxy/0.log" Dec 08 22:43:49 crc kubenswrapper[4791]: I1208 22:43:49.635931 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-697fb699cf-zn5rk_45ebd174-c21e-4fb5-ae01-cf6b3d5e7079/manager/0.log" Dec 08 22:43:50 crc kubenswrapper[4791]: I1208 22:43:50.246101 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-5697bb5779-rzq2w_430711b5-aa60-4462-a730-242ecb914d6c/kube-rbac-proxy/0.log" Dec 08 22:43:50 crc kubenswrapper[4791]: I1208 22:43:50.329855 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-5697bb5779-rzq2w_430711b5-aa60-4462-a730-242ecb914d6c/manager/0.log" Dec 08 22:43:50 crc kubenswrapper[4791]: I1208 22:43:50.491925 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-fsm5r_06e9548e-f7f7-4d48-a10a-06de61005b07/manager/0.log" Dec 08 22:43:50 crc kubenswrapper[4791]: I1208 22:43:50.507688 4791 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-fsm5r_06e9548e-f7f7-4d48-a10a-06de61005b07/kube-rbac-proxy/0.log" Dec 08 22:43:50 crc kubenswrapper[4791]: I1208 22:43:50.574016 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-4mwvh_253be35d-aa0c-417b-8dc8-7ef23f63ce45/kube-rbac-proxy/0.log" Dec 08 22:43:50 crc kubenswrapper[4791]: I1208 22:43:50.683749 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-78d48bff9d-6fqwg_b6c24020-d177-4816-ac96-7f97f8f243a1/kube-rbac-proxy/0.log" Dec 08 22:43:50 crc kubenswrapper[4791]: I1208 22:43:50.702484 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-4mwvh_253be35d-aa0c-417b-8dc8-7ef23f63ce45/manager/0.log" Dec 08 22:43:50 crc kubenswrapper[4791]: I1208 22:43:50.922320 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-78d48bff9d-6fqwg_b6c24020-d177-4816-ac96-7f97f8f243a1/manager/0.log" Dec 08 22:43:50 crc kubenswrapper[4791]: I1208 22:43:50.952210 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-967d97867-s5pvq_23d7f321-494b-46fa-890a-6cb7f47fdb49/kube-rbac-proxy/0.log" Dec 08 22:43:50 crc kubenswrapper[4791]: I1208 22:43:50.987810 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-967d97867-s5pvq_23d7f321-494b-46fa-890a-6cb7f47fdb49/manager/0.log" Dec 08 22:43:51 crc kubenswrapper[4791]: I1208 22:43:51.114279 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-5tzxv_55e85e76-95f6-46ce-906a-26ce559775bc/kube-rbac-proxy/0.log" Dec 08 22:43:51 crc kubenswrapper[4791]: I1208 22:43:51.202422 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-5tzxv_55e85e76-95f6-46ce-906a-26ce559775bc/manager/0.log" Dec 08 22:43:51 crc kubenswrapper[4791]: I1208 22:43:51.326034 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-5b5fd79c9c-ddnts_301abb1d-1139-4636-805b-c6458568fe7e/kube-rbac-proxy/0.log" Dec 08 22:43:51 crc kubenswrapper[4791]: I1208 22:43:51.333945 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-5b5fd79c9c-ddnts_301abb1d-1139-4636-805b-c6458568fe7e/manager/0.log" Dec 08 22:43:51 crc kubenswrapper[4791]: I1208 22:43:51.435095 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-79c8c4686c-nlvf4_4d6f8fd6-c9d1-483e-8cc6-29d2d53f1235/kube-rbac-proxy/0.log" Dec 08 22:43:51 crc kubenswrapper[4791]: I1208 22:43:51.533629 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-79c8c4686c-nlvf4_4d6f8fd6-c9d1-483e-8cc6-29d2d53f1235/manager/0.log" Dec 08 22:43:51 crc kubenswrapper[4791]: I1208 22:43:51.648169 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-hskzn_ac0b7209-48f2-4080-bd26-86462503772b/manager/0.log" Dec 08 22:43:51 crc kubenswrapper[4791]: I1208 22:43:51.663877 4791 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-hskzn_ac0b7209-48f2-4080-bd26-86462503772b/kube-rbac-proxy/0.log" Dec 08 22:43:51 crc kubenswrapper[4791]: I1208 22:43:51.735911 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-6vmnd_8714efaf-0a6b-46ba-aadb-2fef8f7f1a32/kube-rbac-proxy/0.log" Dec 08 22:43:51 crc kubenswrapper[4791]: I1208 22:43:51.904970 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-6vmnd_8714efaf-0a6b-46ba-aadb-2fef8f7f1a32/manager/0.log" Dec 08 22:43:51 crc kubenswrapper[4791]: I1208 22:43:51.961322 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-spmqj_195e298b-eaa8-4d82-a246-bf28d442d9f9/kube-rbac-proxy/0.log" Dec 08 22:43:51 crc kubenswrapper[4791]: I1208 22:43:51.993638 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-spmqj_195e298b-eaa8-4d82-a246-bf28d442d9f9/manager/0.log" Dec 08 22:43:52 crc kubenswrapper[4791]: I1208 22:43:52.113325 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-84b575879fsnbxn_0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46/manager/0.log" Dec 08 22:43:52 crc kubenswrapper[4791]: I1208 22:43:52.126673 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-84b575879fsnbxn_0ab7fad3-1b4c-4f78-99a7-4ace7aa64b46/kube-rbac-proxy/0.log" Dec 08 22:43:52 crc kubenswrapper[4791]: I1208 22:43:52.572664 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-szl4j_6b293818-e753-40aa-88ea-04fb63c0188c/registry-server/0.log" Dec 08 22:43:52 crc kubenswrapper[4791]: I1208 22:43:52.663424 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-wx44v_cc35e433-dd6b-4cdf-9776-49106dbb9f13/kube-rbac-proxy/0.log" Dec 08 22:43:52 crc kubenswrapper[4791]: I1208 22:43:52.890874 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-wx44v_cc35e433-dd6b-4cdf-9776-49106dbb9f13/manager/0.log" Dec 08 22:43:52 crc kubenswrapper[4791]: I1208 22:43:52.903823 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-258f4_e652bc09-301e-4200-a0be-ec79798d93b7/kube-rbac-proxy/0.log" Dec 08 22:43:53 crc kubenswrapper[4791]: I1208 22:43:53.012487 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-7d4449658c-pljtb_97760082-6ccb-4973-9fb6-274647592883/operator/0.log" Dec 08 22:43:53 crc kubenswrapper[4791]: I1208 22:43:53.127679 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-258f4_e652bc09-301e-4200-a0be-ec79798d93b7/manager/0.log" Dec 08 22:43:53 crc kubenswrapper[4791]: I1208 22:43:53.141099 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-fsp7x_48279dfc-ae82-45f8-ba4e-3906c0b1cefa/operator/0.log" Dec 08 22:43:53 crc kubenswrapper[4791]: I1208 22:43:53.255201 4791 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-54c84cffdd-2cvxn_e6338e82-465c-4bbd-862a-5835f329caad/manager/0.log" Dec 08 22:43:53 crc kubenswrapper[4791]: I1208 22:43:53.318576 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-9d58d64bc-d2fdf_a26b22b6-0795-4357-a1ff-9cbdd3b10f45/kube-rbac-proxy/0.log" Dec 08 22:43:53 crc kubenswrapper[4791]: I1208 22:43:53.384106 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-9d58d64bc-d2fdf_a26b22b6-0795-4357-a1ff-9cbdd3b10f45/manager/0.log" Dec 08 22:43:53 crc kubenswrapper[4791]: I1208 22:43:53.505522 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-65f6d9c768-58wmm_bcd8d669-4a40-401d-af99-651b840fb48b/kube-rbac-proxy/0.log" Dec 08 22:43:53 crc kubenswrapper[4791]: I1208 22:43:53.553986 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-65f6d9c768-58wmm_bcd8d669-4a40-401d-af99-651b840fb48b/manager/11.log" Dec 08 22:43:53 crc kubenswrapper[4791]: I1208 22:43:53.555340 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-65f6d9c768-58wmm_bcd8d669-4a40-401d-af99-651b840fb48b/manager/11.log" Dec 08 22:43:53 crc kubenswrapper[4791]: I1208 22:43:53.694789 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-v9cg7_74434a32-0961-43af-b800-8de05830b266/kube-rbac-proxy/0.log" Dec 08 22:43:53 crc kubenswrapper[4791]: I1208 22:43:53.725974 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-v9cg7_74434a32-0961-43af-b800-8de05830b266/manager/0.log" Dec 08 22:43:53 crc kubenswrapper[4791]: I1208 22:43:53.784723 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-667bd8d554-vszqd_6b775709-57bb-4fa2-9eb9-4785356c119c/kube-rbac-proxy/0.log" Dec 08 22:43:53 crc kubenswrapper[4791]: I1208 22:43:53.829292 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-667bd8d554-vszqd_6b775709-57bb-4fa2-9eb9-4785356c119c/manager/0.log" Dec 08 22:43:54 crc kubenswrapper[4791]: I1208 22:43:54.687811 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-h6fgr"] Dec 08 22:43:54 crc kubenswrapper[4791]: E1208 22:43:54.688504 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5040198a-3d23-4a01-a43e-d334617757bf" containerName="container-00" Dec 08 22:43:54 crc kubenswrapper[4791]: I1208 22:43:54.688520 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="5040198a-3d23-4a01-a43e-d334617757bf" containerName="container-00" Dec 08 22:43:54 crc kubenswrapper[4791]: I1208 22:43:54.688932 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="5040198a-3d23-4a01-a43e-d334617757bf" containerName="container-00" Dec 08 22:43:54 crc kubenswrapper[4791]: I1208 22:43:54.695044 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-h6fgr" Dec 08 22:43:54 crc kubenswrapper[4791]: I1208 22:43:54.716938 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-h6fgr"] Dec 08 22:43:54 crc kubenswrapper[4791]: I1208 22:43:54.813980 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8fe0d63b-4103-4692-a065-882f3f5c7a3a-catalog-content\") pod \"certified-operators-h6fgr\" (UID: \"8fe0d63b-4103-4692-a065-882f3f5c7a3a\") " pod="openshift-marketplace/certified-operators-h6fgr" Dec 08 22:43:54 crc kubenswrapper[4791]: I1208 22:43:54.814041 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-np2d2\" (UniqueName: \"kubernetes.io/projected/8fe0d63b-4103-4692-a065-882f3f5c7a3a-kube-api-access-np2d2\") pod \"certified-operators-h6fgr\" (UID: \"8fe0d63b-4103-4692-a065-882f3f5c7a3a\") " pod="openshift-marketplace/certified-operators-h6fgr" Dec 08 22:43:54 crc kubenswrapper[4791]: I1208 22:43:54.815239 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8fe0d63b-4103-4692-a065-882f3f5c7a3a-utilities\") pod \"certified-operators-h6fgr\" (UID: \"8fe0d63b-4103-4692-a065-882f3f5c7a3a\") " pod="openshift-marketplace/certified-operators-h6fgr" Dec 08 22:43:54 crc kubenswrapper[4791]: I1208 22:43:54.918148 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8fe0d63b-4103-4692-a065-882f3f5c7a3a-utilities\") pod \"certified-operators-h6fgr\" (UID: \"8fe0d63b-4103-4692-a065-882f3f5c7a3a\") " pod="openshift-marketplace/certified-operators-h6fgr" Dec 08 22:43:54 crc kubenswrapper[4791]: I1208 22:43:54.918216 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8fe0d63b-4103-4692-a065-882f3f5c7a3a-catalog-content\") pod \"certified-operators-h6fgr\" (UID: \"8fe0d63b-4103-4692-a065-882f3f5c7a3a\") " pod="openshift-marketplace/certified-operators-h6fgr" Dec 08 22:43:54 crc kubenswrapper[4791]: I1208 22:43:54.918259 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-np2d2\" (UniqueName: \"kubernetes.io/projected/8fe0d63b-4103-4692-a065-882f3f5c7a3a-kube-api-access-np2d2\") pod \"certified-operators-h6fgr\" (UID: \"8fe0d63b-4103-4692-a065-882f3f5c7a3a\") " pod="openshift-marketplace/certified-operators-h6fgr" Dec 08 22:43:54 crc kubenswrapper[4791]: I1208 22:43:54.919006 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8fe0d63b-4103-4692-a065-882f3f5c7a3a-utilities\") pod \"certified-operators-h6fgr\" (UID: \"8fe0d63b-4103-4692-a065-882f3f5c7a3a\") " pod="openshift-marketplace/certified-operators-h6fgr" Dec 08 22:43:54 crc kubenswrapper[4791]: I1208 22:43:54.919219 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8fe0d63b-4103-4692-a065-882f3f5c7a3a-catalog-content\") pod \"certified-operators-h6fgr\" (UID: \"8fe0d63b-4103-4692-a065-882f3f5c7a3a\") " pod="openshift-marketplace/certified-operators-h6fgr" Dec 08 22:43:54 crc kubenswrapper[4791]: I1208 22:43:54.940126 4791 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-np2d2\" (UniqueName: \"kubernetes.io/projected/8fe0d63b-4103-4692-a065-882f3f5c7a3a-kube-api-access-np2d2\") pod \"certified-operators-h6fgr\" (UID: \"8fe0d63b-4103-4692-a065-882f3f5c7a3a\") " pod="openshift-marketplace/certified-operators-h6fgr" Dec 08 22:43:55 crc kubenswrapper[4791]: I1208 22:43:55.067300 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-h6fgr" Dec 08 22:43:55 crc kubenswrapper[4791]: I1208 22:43:55.789329 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-h6fgr"] Dec 08 22:43:55 crc kubenswrapper[4791]: W1208 22:43:55.790547 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8fe0d63b_4103_4692_a065_882f3f5c7a3a.slice/crio-a29156a1a460515c70a40c4c2728636d531739199743d44461f4e780a32bb7bd WatchSource:0}: Error finding container a29156a1a460515c70a40c4c2728636d531739199743d44461f4e780a32bb7bd: Status 404 returned error can't find the container with id a29156a1a460515c70a40c4c2728636d531739199743d44461f4e780a32bb7bd Dec 08 22:43:56 crc kubenswrapper[4791]: I1208 22:43:56.532695 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h6fgr" event={"ID":"8fe0d63b-4103-4692-a065-882f3f5c7a3a","Type":"ContainerDied","Data":"8b24e2fd7f6dc01bcbdc9f22b8dff0079593ce20b01840abfe8d11c03691d7f8"} Dec 08 22:43:56 crc kubenswrapper[4791]: I1208 22:43:56.532533 4791 generic.go:334] "Generic (PLEG): container finished" podID="8fe0d63b-4103-4692-a065-882f3f5c7a3a" containerID="8b24e2fd7f6dc01bcbdc9f22b8dff0079593ce20b01840abfe8d11c03691d7f8" exitCode=0 Dec 08 22:43:56 crc kubenswrapper[4791]: I1208 22:43:56.534001 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h6fgr" event={"ID":"8fe0d63b-4103-4692-a065-882f3f5c7a3a","Type":"ContainerStarted","Data":"a29156a1a460515c70a40c4c2728636d531739199743d44461f4e780a32bb7bd"} Dec 08 22:43:56 crc kubenswrapper[4791]: I1208 22:43:56.535249 4791 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 08 22:43:57 crc kubenswrapper[4791]: I1208 22:43:57.546504 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h6fgr" event={"ID":"8fe0d63b-4103-4692-a065-882f3f5c7a3a","Type":"ContainerStarted","Data":"49a4ca4e97881a9ed827fa92d11bb85f5c8d698817558bc20c521937931cb5b2"} Dec 08 22:43:58 crc kubenswrapper[4791]: I1208 22:43:58.558982 4791 generic.go:334] "Generic (PLEG): container finished" podID="8fe0d63b-4103-4692-a065-882f3f5c7a3a" containerID="49a4ca4e97881a9ed827fa92d11bb85f5c8d698817558bc20c521937931cb5b2" exitCode=0 Dec 08 22:43:58 crc kubenswrapper[4791]: I1208 22:43:58.559112 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h6fgr" event={"ID":"8fe0d63b-4103-4692-a065-882f3f5c7a3a","Type":"ContainerDied","Data":"49a4ca4e97881a9ed827fa92d11bb85f5c8d698817558bc20c521937931cb5b2"} Dec 08 22:43:58 crc kubenswrapper[4791]: I1208 22:43:58.597995 4791 scope.go:117] "RemoveContainer" containerID="d1d66e6afd76d88feb8ad14a9a90b9e7ce167c449b3742aef02e4f45a1de08c8" Dec 08 22:43:58 crc kubenswrapper[4791]: I1208 22:43:58.598257 4791 scope.go:117] "RemoveContainer" containerID="70973cf87c58a479cd2e1f6477c1a3ec7af9883a93b55c6f4884d75fee584abd" Dec 08 22:43:58 crc 
kubenswrapper[4791]: E1208 22:43:58.598339 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:43:58 crc kubenswrapper[4791]: E1208 22:43:58.598658 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:43:59 crc kubenswrapper[4791]: I1208 22:43:59.571005 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h6fgr" event={"ID":"8fe0d63b-4103-4692-a065-882f3f5c7a3a","Type":"ContainerStarted","Data":"efd65846d7636142509573d2d66aa476a8189d418a0c7288c09b076024adf1c0"} Dec 08 22:43:59 crc kubenswrapper[4791]: I1208 22:43:59.604514 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-h6fgr" podStartSLOduration=3.191600778 podStartE2EDuration="5.604495087s" podCreationTimestamp="2025-12-08 22:43:54 +0000 UTC" firstStartedPulling="2025-12-08 22:43:56.534987662 +0000 UTC m=+5113.233746007" lastFinishedPulling="2025-12-08 22:43:58.947881971 +0000 UTC m=+5115.646640316" observedRunningTime="2025-12-08 22:43:59.591238615 +0000 UTC m=+5116.289996960" watchObservedRunningTime="2025-12-08 22:43:59.604495087 +0000 UTC m=+5116.303253432" Dec 08 22:44:05 crc kubenswrapper[4791]: I1208 22:44:05.068044 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-h6fgr" Dec 08 22:44:05 crc kubenswrapper[4791]: I1208 22:44:05.068636 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-h6fgr" Dec 08 22:44:05 crc kubenswrapper[4791]: I1208 22:44:05.122422 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-h6fgr" Dec 08 22:44:06 crc kubenswrapper[4791]: I1208 22:44:06.181074 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-h6fgr" Dec 08 22:44:06 crc kubenswrapper[4791]: I1208 22:44:06.237128 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-h6fgr"] Dec 08 22:44:07 crc kubenswrapper[4791]: I1208 22:44:07.656246 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-h6fgr" podUID="8fe0d63b-4103-4692-a065-882f3f5c7a3a" containerName="registry-server" containerID="cri-o://efd65846d7636142509573d2d66aa476a8189d418a0c7288c09b076024adf1c0" gracePeriod=2 Dec 08 22:44:08 crc kubenswrapper[4791]: I1208 22:44:08.684979 4791 generic.go:334] "Generic (PLEG): container finished" podID="8fe0d63b-4103-4692-a065-882f3f5c7a3a" containerID="efd65846d7636142509573d2d66aa476a8189d418a0c7288c09b076024adf1c0" exitCode=0 Dec 08 22:44:08 crc kubenswrapper[4791]: I1208 22:44:08.685328 4791 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h6fgr" event={"ID":"8fe0d63b-4103-4692-a065-882f3f5c7a3a","Type":"ContainerDied","Data":"efd65846d7636142509573d2d66aa476a8189d418a0c7288c09b076024adf1c0"} Dec 08 22:44:08 crc kubenswrapper[4791]: I1208 22:44:08.685356 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h6fgr" event={"ID":"8fe0d63b-4103-4692-a065-882f3f5c7a3a","Type":"ContainerDied","Data":"a29156a1a460515c70a40c4c2728636d531739199743d44461f4e780a32bb7bd"} Dec 08 22:44:08 crc kubenswrapper[4791]: I1208 22:44:08.685366 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a29156a1a460515c70a40c4c2728636d531739199743d44461f4e780a32bb7bd" Dec 08 22:44:08 crc kubenswrapper[4791]: I1208 22:44:08.718614 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-h6fgr" Dec 08 22:44:08 crc kubenswrapper[4791]: I1208 22:44:08.792915 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-np2d2\" (UniqueName: \"kubernetes.io/projected/8fe0d63b-4103-4692-a065-882f3f5c7a3a-kube-api-access-np2d2\") pod \"8fe0d63b-4103-4692-a065-882f3f5c7a3a\" (UID: \"8fe0d63b-4103-4692-a065-882f3f5c7a3a\") " Dec 08 22:44:08 crc kubenswrapper[4791]: I1208 22:44:08.793080 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8fe0d63b-4103-4692-a065-882f3f5c7a3a-utilities\") pod \"8fe0d63b-4103-4692-a065-882f3f5c7a3a\" (UID: \"8fe0d63b-4103-4692-a065-882f3f5c7a3a\") " Dec 08 22:44:08 crc kubenswrapper[4791]: I1208 22:44:08.793118 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8fe0d63b-4103-4692-a065-882f3f5c7a3a-catalog-content\") pod \"8fe0d63b-4103-4692-a065-882f3f5c7a3a\" (UID: \"8fe0d63b-4103-4692-a065-882f3f5c7a3a\") " Dec 08 22:44:08 crc kubenswrapper[4791]: I1208 22:44:08.796116 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8fe0d63b-4103-4692-a065-882f3f5c7a3a-utilities" (OuterVolumeSpecName: "utilities") pod "8fe0d63b-4103-4692-a065-882f3f5c7a3a" (UID: "8fe0d63b-4103-4692-a065-882f3f5c7a3a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:44:08 crc kubenswrapper[4791]: I1208 22:44:08.808230 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8fe0d63b-4103-4692-a065-882f3f5c7a3a-kube-api-access-np2d2" (OuterVolumeSpecName: "kube-api-access-np2d2") pod "8fe0d63b-4103-4692-a065-882f3f5c7a3a" (UID: "8fe0d63b-4103-4692-a065-882f3f5c7a3a"). InnerVolumeSpecName "kube-api-access-np2d2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 22:44:08 crc kubenswrapper[4791]: I1208 22:44:08.861038 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8fe0d63b-4103-4692-a065-882f3f5c7a3a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8fe0d63b-4103-4692-a065-882f3f5c7a3a" (UID: "8fe0d63b-4103-4692-a065-882f3f5c7a3a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:44:08 crc kubenswrapper[4791]: I1208 22:44:08.896613 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-np2d2\" (UniqueName: \"kubernetes.io/projected/8fe0d63b-4103-4692-a065-882f3f5c7a3a-kube-api-access-np2d2\") on node \"crc\" DevicePath \"\"" Dec 08 22:44:08 crc kubenswrapper[4791]: I1208 22:44:08.896658 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8fe0d63b-4103-4692-a065-882f3f5c7a3a-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 22:44:08 crc kubenswrapper[4791]: I1208 22:44:08.896670 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8fe0d63b-4103-4692-a065-882f3f5c7a3a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 22:44:09 crc kubenswrapper[4791]: I1208 22:44:09.694301 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-h6fgr" Dec 08 22:44:09 crc kubenswrapper[4791]: I1208 22:44:09.719340 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-h6fgr"] Dec 08 22:44:09 crc kubenswrapper[4791]: I1208 22:44:09.728782 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-h6fgr"] Dec 08 22:44:11 crc kubenswrapper[4791]: I1208 22:44:11.598242 4791 scope.go:117] "RemoveContainer" containerID="70973cf87c58a479cd2e1f6477c1a3ec7af9883a93b55c6f4884d75fee584abd" Dec 08 22:44:11 crc kubenswrapper[4791]: E1208 22:44:11.599326 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:44:11 crc kubenswrapper[4791]: I1208 22:44:11.599779 4791 scope.go:117] "RemoveContainer" containerID="d1d66e6afd76d88feb8ad14a9a90b9e7ce167c449b3742aef02e4f45a1de08c8" Dec 08 22:44:11 crc kubenswrapper[4791]: E1208 22:44:11.600077 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:44:11 crc kubenswrapper[4791]: I1208 22:44:11.628272 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8fe0d63b-4103-4692-a065-882f3f5c7a3a" path="/var/lib/kubelet/pods/8fe0d63b-4103-4692-a065-882f3f5c7a3a/volumes" Dec 08 22:44:15 crc kubenswrapper[4791]: I1208 22:44:15.743198 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/openstack-cell1-galera-0" podUID="8e46f211-8213-4219-9389-044888a87181" containerName="galera" probeResult="failure" output="command timed out" Dec 08 22:44:17 crc kubenswrapper[4791]: I1208 22:44:17.765939 4791 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-68nlk_839bdb0e-d6c5-4464-8e23-ee63845cf40f/control-plane-machine-set-operator/0.log" Dec 08 22:44:17 crc kubenswrapper[4791]: I1208 22:44:17.781938 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-7c86k_10e8b274-a7ca-4b48-b5f5-7345a78cd074/kube-rbac-proxy/0.log" Dec 08 22:44:17 crc kubenswrapper[4791]: I1208 22:44:17.894302 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-7c86k_10e8b274-a7ca-4b48-b5f5-7345a78cd074/machine-api-operator/0.log" Dec 08 22:44:19 crc kubenswrapper[4791]: I1208 22:44:19.840550 4791 scope.go:117] "RemoveContainer" containerID="d638f354433479bd88ffba4cbf74bedcdf302015475e21826159d2bb50f14943" Dec 08 22:44:19 crc kubenswrapper[4791]: I1208 22:44:19.882591 4791 scope.go:117] "RemoveContainer" containerID="f316e5503be4b3aca2fff81ea7f8789218c8b9dd6434b831e9d12f045f4b9f2b" Dec 08 22:44:19 crc kubenswrapper[4791]: I1208 22:44:19.920907 4791 scope.go:117] "RemoveContainer" containerID="6ca046efb571b7e809da1861b9dffa4730880bab0fb7fc55c284eae458014d7f" Dec 08 22:44:22 crc kubenswrapper[4791]: I1208 22:44:22.598018 4791 scope.go:117] "RemoveContainer" containerID="d1d66e6afd76d88feb8ad14a9a90b9e7ce167c449b3742aef02e4f45a1de08c8" Dec 08 22:44:22 crc kubenswrapper[4791]: E1208 22:44:22.598817 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:44:25 crc kubenswrapper[4791]: I1208 22:44:25.598470 4791 scope.go:117] "RemoveContainer" containerID="70973cf87c58a479cd2e1f6477c1a3ec7af9883a93b55c6f4884d75fee584abd" Dec 08 22:44:25 crc kubenswrapper[4791]: E1208 22:44:25.599383 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:44:31 crc kubenswrapper[4791]: I1208 22:44:31.899086 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-pvml8_46ead9b1-5e64-4c26-a35a-6f6fd7884e1d/cert-manager-controller/0.log" Dec 08 22:44:32 crc kubenswrapper[4791]: I1208 22:44:32.022089 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-67c78_65d9eb9f-c7d6-4e03-b9b7-061d49ec03af/cert-manager-cainjector/0.log" Dec 08 22:44:32 crc kubenswrapper[4791]: I1208 22:44:32.116746 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-sd8bs_60c082ea-6911-490a-8989-a16e6a63fac6/cert-manager-webhook/0.log" Dec 08 22:44:33 crc kubenswrapper[4791]: I1208 22:44:33.608690 4791 scope.go:117] "RemoveContainer" containerID="d1d66e6afd76d88feb8ad14a9a90b9e7ce167c449b3742aef02e4f45a1de08c8" Dec 08 22:44:33 crc kubenswrapper[4791]: E1208 22:44:33.609284 4791 pod_workers.go:1301] 
"Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:44:39 crc kubenswrapper[4791]: I1208 22:44:39.598541 4791 scope.go:117] "RemoveContainer" containerID="70973cf87c58a479cd2e1f6477c1a3ec7af9883a93b55c6f4884d75fee584abd" Dec 08 22:44:39 crc kubenswrapper[4791]: E1208 22:44:39.599306 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:44:45 crc kubenswrapper[4791]: I1208 22:44:45.599103 4791 scope.go:117] "RemoveContainer" containerID="d1d66e6afd76d88feb8ad14a9a90b9e7ce167c449b3742aef02e4f45a1de08c8" Dec 08 22:44:45 crc kubenswrapper[4791]: E1208 22:44:45.600107 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:44:48 crc kubenswrapper[4791]: I1208 22:44:48.293221 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-b4kn6_a9643486-e98d-4bac-8f5b-202e0e1c9551/nmstate-console-plugin/0.log" Dec 08 22:44:48 crc kubenswrapper[4791]: I1208 22:44:48.452994 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-w5q9k_da3c6939-b434-456a-9593-52b6793d4c53/nmstate-handler/0.log" Dec 08 22:44:48 crc kubenswrapper[4791]: I1208 22:44:48.508562 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-4snw4_9b839d23-b374-47d4-ae08-37eef97533d6/kube-rbac-proxy/0.log" Dec 08 22:44:48 crc kubenswrapper[4791]: I1208 22:44:48.541763 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-4snw4_9b839d23-b374-47d4-ae08-37eef97533d6/nmstate-metrics/0.log" Dec 08 22:44:48 crc kubenswrapper[4791]: I1208 22:44:48.701634 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-rjsz7_caf3748e-6f48-478e-b1ee-b6e861f9e9c2/nmstate-operator/0.log" Dec 08 22:44:48 crc kubenswrapper[4791]: I1208 22:44:48.815231 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-l52qv_f00220d9-a8b6-4197-b2e4-acc99f79e2da/nmstate-webhook/0.log" Dec 08 22:44:53 crc kubenswrapper[4791]: I1208 22:44:53.608584 4791 scope.go:117] "RemoveContainer" containerID="70973cf87c58a479cd2e1f6477c1a3ec7af9883a93b55c6f4884d75fee584abd" Dec 08 22:44:53 crc kubenswrapper[4791]: E1208 22:44:53.609727 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:44:58 crc kubenswrapper[4791]: I1208 22:44:58.598172 4791 scope.go:117] "RemoveContainer" containerID="d1d66e6afd76d88feb8ad14a9a90b9e7ce167c449b3742aef02e4f45a1de08c8" Dec 08 22:44:58 crc kubenswrapper[4791]: E1208 22:44:58.598923 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:45:00 crc kubenswrapper[4791]: I1208 22:45:00.147301 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420565-lhjkt"] Dec 08 22:45:00 crc kubenswrapper[4791]: E1208 22:45:00.148177 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fe0d63b-4103-4692-a065-882f3f5c7a3a" containerName="registry-server" Dec 08 22:45:00 crc kubenswrapper[4791]: I1208 22:45:00.148193 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fe0d63b-4103-4692-a065-882f3f5c7a3a" containerName="registry-server" Dec 08 22:45:00 crc kubenswrapper[4791]: E1208 22:45:00.148214 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fe0d63b-4103-4692-a065-882f3f5c7a3a" containerName="extract-content" Dec 08 22:45:00 crc kubenswrapper[4791]: I1208 22:45:00.148220 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fe0d63b-4103-4692-a065-882f3f5c7a3a" containerName="extract-content" Dec 08 22:45:00 crc kubenswrapper[4791]: E1208 22:45:00.148233 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fe0d63b-4103-4692-a065-882f3f5c7a3a" containerName="extract-utilities" Dec 08 22:45:00 crc kubenswrapper[4791]: I1208 22:45:00.148240 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fe0d63b-4103-4692-a065-882f3f5c7a3a" containerName="extract-utilities" Dec 08 22:45:00 crc kubenswrapper[4791]: I1208 22:45:00.148446 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="8fe0d63b-4103-4692-a065-882f3f5c7a3a" containerName="registry-server" Dec 08 22:45:00 crc kubenswrapper[4791]: I1208 22:45:00.149357 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420565-lhjkt" Dec 08 22:45:00 crc kubenswrapper[4791]: I1208 22:45:00.155142 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 08 22:45:00 crc kubenswrapper[4791]: I1208 22:45:00.155300 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 08 22:45:00 crc kubenswrapper[4791]: I1208 22:45:00.163990 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420565-lhjkt"] Dec 08 22:45:00 crc kubenswrapper[4791]: I1208 22:45:00.330763 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/85cfa0ae-b55a-4d76-ae24-555e376ec6c1-config-volume\") pod \"collect-profiles-29420565-lhjkt\" (UID: \"85cfa0ae-b55a-4d76-ae24-555e376ec6c1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420565-lhjkt" Dec 08 22:45:00 crc kubenswrapper[4791]: I1208 22:45:00.330959 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/85cfa0ae-b55a-4d76-ae24-555e376ec6c1-secret-volume\") pod \"collect-profiles-29420565-lhjkt\" (UID: \"85cfa0ae-b55a-4d76-ae24-555e376ec6c1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420565-lhjkt" Dec 08 22:45:00 crc kubenswrapper[4791]: I1208 22:45:00.331004 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5xb66\" (UniqueName: \"kubernetes.io/projected/85cfa0ae-b55a-4d76-ae24-555e376ec6c1-kube-api-access-5xb66\") pod \"collect-profiles-29420565-lhjkt\" (UID: \"85cfa0ae-b55a-4d76-ae24-555e376ec6c1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420565-lhjkt" Dec 08 22:45:00 crc kubenswrapper[4791]: I1208 22:45:00.433153 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/85cfa0ae-b55a-4d76-ae24-555e376ec6c1-config-volume\") pod \"collect-profiles-29420565-lhjkt\" (UID: \"85cfa0ae-b55a-4d76-ae24-555e376ec6c1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420565-lhjkt" Dec 08 22:45:00 crc kubenswrapper[4791]: I1208 22:45:00.433254 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/85cfa0ae-b55a-4d76-ae24-555e376ec6c1-secret-volume\") pod \"collect-profiles-29420565-lhjkt\" (UID: \"85cfa0ae-b55a-4d76-ae24-555e376ec6c1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420565-lhjkt" Dec 08 22:45:00 crc kubenswrapper[4791]: I1208 22:45:00.433285 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5xb66\" (UniqueName: \"kubernetes.io/projected/85cfa0ae-b55a-4d76-ae24-555e376ec6c1-kube-api-access-5xb66\") pod \"collect-profiles-29420565-lhjkt\" (UID: \"85cfa0ae-b55a-4d76-ae24-555e376ec6c1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420565-lhjkt" Dec 08 22:45:00 crc kubenswrapper[4791]: I1208 22:45:00.434230 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/85cfa0ae-b55a-4d76-ae24-555e376ec6c1-config-volume\") pod 
\"collect-profiles-29420565-lhjkt\" (UID: \"85cfa0ae-b55a-4d76-ae24-555e376ec6c1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420565-lhjkt" Dec 08 22:45:00 crc kubenswrapper[4791]: I1208 22:45:00.444843 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/85cfa0ae-b55a-4d76-ae24-555e376ec6c1-secret-volume\") pod \"collect-profiles-29420565-lhjkt\" (UID: \"85cfa0ae-b55a-4d76-ae24-555e376ec6c1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420565-lhjkt" Dec 08 22:45:00 crc kubenswrapper[4791]: I1208 22:45:00.453319 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5xb66\" (UniqueName: \"kubernetes.io/projected/85cfa0ae-b55a-4d76-ae24-555e376ec6c1-kube-api-access-5xb66\") pod \"collect-profiles-29420565-lhjkt\" (UID: \"85cfa0ae-b55a-4d76-ae24-555e376ec6c1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420565-lhjkt" Dec 08 22:45:00 crc kubenswrapper[4791]: I1208 22:45:00.477136 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420565-lhjkt" Dec 08 22:45:01 crc kubenswrapper[4791]: I1208 22:45:01.004906 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420565-lhjkt"] Dec 08 22:45:01 crc kubenswrapper[4791]: I1208 22:45:01.270702 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29420565-lhjkt" event={"ID":"85cfa0ae-b55a-4d76-ae24-555e376ec6c1","Type":"ContainerStarted","Data":"abfabf38bed38045e9a5db12caf43cbfaf882adaa054c3558c581dba12a8cfd0"} Dec 08 22:45:01 crc kubenswrapper[4791]: I1208 22:45:01.271056 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29420565-lhjkt" event={"ID":"85cfa0ae-b55a-4d76-ae24-555e376ec6c1","Type":"ContainerStarted","Data":"8c2a68c0010d7d0abd8a76b49416fd5998616fa1a4dbc1a997d554a4fa3991c4"} Dec 08 22:45:02 crc kubenswrapper[4791]: I1208 22:45:02.286783 4791 generic.go:334] "Generic (PLEG): container finished" podID="85cfa0ae-b55a-4d76-ae24-555e376ec6c1" containerID="abfabf38bed38045e9a5db12caf43cbfaf882adaa054c3558c581dba12a8cfd0" exitCode=0 Dec 08 22:45:02 crc kubenswrapper[4791]: I1208 22:45:02.286882 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29420565-lhjkt" event={"ID":"85cfa0ae-b55a-4d76-ae24-555e376ec6c1","Type":"ContainerDied","Data":"abfabf38bed38045e9a5db12caf43cbfaf882adaa054c3558c581dba12a8cfd0"} Dec 08 22:45:02 crc kubenswrapper[4791]: I1208 22:45:02.574116 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-7b4df4946c-d67zd_0c11badb-ae6f-4efe-9d82-80545108b777/kube-rbac-proxy/0.log" Dec 08 22:45:02 crc kubenswrapper[4791]: I1208 22:45:02.623107 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-7b4df4946c-d67zd_0c11badb-ae6f-4efe-9d82-80545108b777/manager/0.log" Dec 08 22:45:03 crc kubenswrapper[4791]: I1208 22:45:03.712413 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420565-lhjkt" Dec 08 22:45:03 crc kubenswrapper[4791]: I1208 22:45:03.818031 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/85cfa0ae-b55a-4d76-ae24-555e376ec6c1-config-volume\") pod \"85cfa0ae-b55a-4d76-ae24-555e376ec6c1\" (UID: \"85cfa0ae-b55a-4d76-ae24-555e376ec6c1\") " Dec 08 22:45:03 crc kubenswrapper[4791]: I1208 22:45:03.818231 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/85cfa0ae-b55a-4d76-ae24-555e376ec6c1-secret-volume\") pod \"85cfa0ae-b55a-4d76-ae24-555e376ec6c1\" (UID: \"85cfa0ae-b55a-4d76-ae24-555e376ec6c1\") " Dec 08 22:45:03 crc kubenswrapper[4791]: I1208 22:45:03.818266 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5xb66\" (UniqueName: \"kubernetes.io/projected/85cfa0ae-b55a-4d76-ae24-555e376ec6c1-kube-api-access-5xb66\") pod \"85cfa0ae-b55a-4d76-ae24-555e376ec6c1\" (UID: \"85cfa0ae-b55a-4d76-ae24-555e376ec6c1\") " Dec 08 22:45:03 crc kubenswrapper[4791]: I1208 22:45:03.819023 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/85cfa0ae-b55a-4d76-ae24-555e376ec6c1-config-volume" (OuterVolumeSpecName: "config-volume") pod "85cfa0ae-b55a-4d76-ae24-555e376ec6c1" (UID: "85cfa0ae-b55a-4d76-ae24-555e376ec6c1"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 22:45:03 crc kubenswrapper[4791]: I1208 22:45:03.824432 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85cfa0ae-b55a-4d76-ae24-555e376ec6c1-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "85cfa0ae-b55a-4d76-ae24-555e376ec6c1" (UID: "85cfa0ae-b55a-4d76-ae24-555e376ec6c1"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 22:45:03 crc kubenswrapper[4791]: I1208 22:45:03.824501 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85cfa0ae-b55a-4d76-ae24-555e376ec6c1-kube-api-access-5xb66" (OuterVolumeSpecName: "kube-api-access-5xb66") pod "85cfa0ae-b55a-4d76-ae24-555e376ec6c1" (UID: "85cfa0ae-b55a-4d76-ae24-555e376ec6c1"). InnerVolumeSpecName "kube-api-access-5xb66". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 22:45:03 crc kubenswrapper[4791]: I1208 22:45:03.921254 4791 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/85cfa0ae-b55a-4d76-ae24-555e376ec6c1-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 08 22:45:03 crc kubenswrapper[4791]: I1208 22:45:03.921298 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5xb66\" (UniqueName: \"kubernetes.io/projected/85cfa0ae-b55a-4d76-ae24-555e376ec6c1-kube-api-access-5xb66\") on node \"crc\" DevicePath \"\"" Dec 08 22:45:03 crc kubenswrapper[4791]: I1208 22:45:03.921308 4791 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/85cfa0ae-b55a-4d76-ae24-555e376ec6c1-config-volume\") on node \"crc\" DevicePath \"\"" Dec 08 22:45:04 crc kubenswrapper[4791]: I1208 22:45:04.308661 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29420565-lhjkt" event={"ID":"85cfa0ae-b55a-4d76-ae24-555e376ec6c1","Type":"ContainerDied","Data":"8c2a68c0010d7d0abd8a76b49416fd5998616fa1a4dbc1a997d554a4fa3991c4"} Dec 08 22:45:04 crc kubenswrapper[4791]: I1208 22:45:04.309132 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8c2a68c0010d7d0abd8a76b49416fd5998616fa1a4dbc1a997d554a4fa3991c4" Dec 08 22:45:04 crc kubenswrapper[4791]: I1208 22:45:04.308691 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420565-lhjkt" Dec 08 22:45:04 crc kubenswrapper[4791]: I1208 22:45:04.369487 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420520-pkcq5"] Dec 08 22:45:04 crc kubenswrapper[4791]: I1208 22:45:04.380656 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420520-pkcq5"] Dec 08 22:45:05 crc kubenswrapper[4791]: I1208 22:45:05.611116 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="423e750a-05a4-43db-9c7b-57717d6e3903" path="/var/lib/kubelet/pods/423e750a-05a4-43db-9c7b-57717d6e3903/volumes" Dec 08 22:45:08 crc kubenswrapper[4791]: I1208 22:45:08.598908 4791 scope.go:117] "RemoveContainer" containerID="70973cf87c58a479cd2e1f6477c1a3ec7af9883a93b55c6f4884d75fee584abd" Dec 08 22:45:08 crc kubenswrapper[4791]: E1208 22:45:08.599795 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:45:12 crc kubenswrapper[4791]: I1208 22:45:12.598898 4791 scope.go:117] "RemoveContainer" containerID="d1d66e6afd76d88feb8ad14a9a90b9e7ce167c449b3742aef02e4f45a1de08c8" Dec 08 22:45:12 crc kubenswrapper[4791]: E1208 22:45:12.599833 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" 
pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:45:19 crc kubenswrapper[4791]: I1208 22:45:19.599628 4791 scope.go:117] "RemoveContainer" containerID="70973cf87c58a479cd2e1f6477c1a3ec7af9883a93b55c6f4884d75fee584abd" Dec 08 22:45:19 crc kubenswrapper[4791]: E1208 22:45:19.600322 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:45:20 crc kubenswrapper[4791]: I1208 22:45:20.029001 4791 scope.go:117] "RemoveContainer" containerID="401bec1e857e0b5aeb9d702ceafd0e5997a0c95232c3ceeeb945c29acfd1db2e" Dec 08 22:45:21 crc kubenswrapper[4791]: I1208 22:45:21.550648 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_cluster-logging-operator-ff9846bd-qvvzh_1b8008a0-6001-445e-8da9-0c0f43ed3877/cluster-logging-operator/0.log" Dec 08 22:45:21 crc kubenswrapper[4791]: I1208 22:45:21.564573 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_collector-hvfq9_336b6c6a-4e6c-4123-9c5e-676554f18718/collector/0.log" Dec 08 22:45:21 crc kubenswrapper[4791]: I1208 22:45:21.788523 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-distributor-76cc67bf56-nbw68_ca262116-be5e-42bb-b68e-5d96c476628a/loki-distributor/0.log" Dec 08 22:45:21 crc kubenswrapper[4791]: I1208 22:45:21.795506 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-compactor-0_3f7c0b21-8653-4e60-9e31-48c491c92f1f/loki-compactor/0.log" Dec 08 22:45:22 crc kubenswrapper[4791]: I1208 22:45:22.001278 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-gateway-868b848d6f-mk4cx_2d8aae18-ca02-4ba1-8b8f-ca028ccea24e/gateway/0.log" Dec 08 22:45:22 crc kubenswrapper[4791]: I1208 22:45:22.008868 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-gateway-868b848d6f-mk4cx_2d8aae18-ca02-4ba1-8b8f-ca028ccea24e/opa/0.log" Dec 08 22:45:22 crc kubenswrapper[4791]: I1208 22:45:22.263530 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-gateway-868b848d6f-twnll_4d4e77a7-b71b-466f-8964-b1c4257c7c79/gateway/0.log" Dec 08 22:45:22 crc kubenswrapper[4791]: I1208 22:45:22.299472 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-gateway-868b848d6f-twnll_4d4e77a7-b71b-466f-8964-b1c4257c7c79/opa/0.log" Dec 08 22:45:22 crc kubenswrapper[4791]: I1208 22:45:22.353067 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-index-gateway-0_2097a8a1-e800-4cf3-89bc-8f540d7e6c3a/loki-index-gateway/0.log" Dec 08 22:45:22 crc kubenswrapper[4791]: I1208 22:45:22.565659 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-ingester-0_542dc8e1-9fec-4080-bd2d-8e51070f73a1/loki-ingester/0.log" Dec 08 22:45:22 crc kubenswrapper[4791]: I1208 22:45:22.605417 4791 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-logging_logging-loki-querier-5895d59bb8-jbrkm_02e28803-31d3-4093-892d-cb6ae8ca37a0/loki-querier/0.log" Dec 08 22:45:22 crc kubenswrapper[4791]: I1208 22:45:22.753082 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-query-frontend-84558f7c9f-qrzd8_544545f2-67f8-4bb2-8287-644c13874f93/loki-query-frontend/0.log" Dec 08 22:45:25 crc kubenswrapper[4791]: I1208 22:45:25.598627 4791 scope.go:117] "RemoveContainer" containerID="d1d66e6afd76d88feb8ad14a9a90b9e7ce167c449b3742aef02e4f45a1de08c8" Dec 08 22:45:25 crc kubenswrapper[4791]: E1208 22:45:25.599224 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:45:31 crc kubenswrapper[4791]: I1208 22:45:31.598684 4791 scope.go:117] "RemoveContainer" containerID="70973cf87c58a479cd2e1f6477c1a3ec7af9883a93b55c6f4884d75fee584abd" Dec 08 22:45:31 crc kubenswrapper[4791]: E1208 22:45:31.600935 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:45:39 crc kubenswrapper[4791]: I1208 22:45:39.126065 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-wbgwq_ffeac0c0-7221-4e0b-a48f-c457875da8f7/kube-rbac-proxy/0.log" Dec 08 22:45:39 crc kubenswrapper[4791]: I1208 22:45:39.175175 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-wbgwq_ffeac0c0-7221-4e0b-a48f-c457875da8f7/controller/0.log" Dec 08 22:45:39 crc kubenswrapper[4791]: I1208 22:45:39.327764 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2725l_e9d59a9f-d637-407a-a0f9-ae4f4e765b14/cp-frr-files/0.log" Dec 08 22:45:39 crc kubenswrapper[4791]: I1208 22:45:39.509978 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2725l_e9d59a9f-d637-407a-a0f9-ae4f4e765b14/cp-reloader/0.log" Dec 08 22:45:39 crc kubenswrapper[4791]: I1208 22:45:39.534342 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2725l_e9d59a9f-d637-407a-a0f9-ae4f4e765b14/cp-frr-files/0.log" Dec 08 22:45:39 crc kubenswrapper[4791]: I1208 22:45:39.549059 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2725l_e9d59a9f-d637-407a-a0f9-ae4f4e765b14/cp-metrics/0.log" Dec 08 22:45:39 crc kubenswrapper[4791]: I1208 22:45:39.572528 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2725l_e9d59a9f-d637-407a-a0f9-ae4f4e765b14/cp-reloader/0.log" Dec 08 22:45:39 crc kubenswrapper[4791]: I1208 22:45:39.774661 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2725l_e9d59a9f-d637-407a-a0f9-ae4f4e765b14/cp-reloader/0.log" Dec 08 22:45:39 crc kubenswrapper[4791]: I1208 22:45:39.790104 4791 log.go:25] "Finished parsing 
log file" path="/var/log/pods/metallb-system_frr-k8s-2725l_e9d59a9f-d637-407a-a0f9-ae4f4e765b14/cp-metrics/0.log" Dec 08 22:45:39 crc kubenswrapper[4791]: I1208 22:45:39.795029 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2725l_e9d59a9f-d637-407a-a0f9-ae4f4e765b14/cp-frr-files/0.log" Dec 08 22:45:39 crc kubenswrapper[4791]: I1208 22:45:39.815256 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2725l_e9d59a9f-d637-407a-a0f9-ae4f4e765b14/cp-metrics/0.log" Dec 08 22:45:40 crc kubenswrapper[4791]: I1208 22:45:40.598600 4791 scope.go:117] "RemoveContainer" containerID="d1d66e6afd76d88feb8ad14a9a90b9e7ce167c449b3742aef02e4f45a1de08c8" Dec 08 22:45:40 crc kubenswrapper[4791]: E1208 22:45:40.599380 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:45:40 crc kubenswrapper[4791]: I1208 22:45:40.747165 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2725l_e9d59a9f-d637-407a-a0f9-ae4f4e765b14/cp-reloader/0.log" Dec 08 22:45:40 crc kubenswrapper[4791]: I1208 22:45:40.772360 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2725l_e9d59a9f-d637-407a-a0f9-ae4f4e765b14/controller/0.log" Dec 08 22:45:40 crc kubenswrapper[4791]: I1208 22:45:40.790966 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2725l_e9d59a9f-d637-407a-a0f9-ae4f4e765b14/cp-frr-files/0.log" Dec 08 22:45:40 crc kubenswrapper[4791]: I1208 22:45:40.807564 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2725l_e9d59a9f-d637-407a-a0f9-ae4f4e765b14/cp-metrics/0.log" Dec 08 22:45:41 crc kubenswrapper[4791]: I1208 22:45:41.010023 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2725l_e9d59a9f-d637-407a-a0f9-ae4f4e765b14/frr-metrics/0.log" Dec 08 22:45:41 crc kubenswrapper[4791]: I1208 22:45:41.031523 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2725l_e9d59a9f-d637-407a-a0f9-ae4f4e765b14/kube-rbac-proxy-frr/0.log" Dec 08 22:45:41 crc kubenswrapper[4791]: I1208 22:45:41.036878 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2725l_e9d59a9f-d637-407a-a0f9-ae4f4e765b14/kube-rbac-proxy/0.log" Dec 08 22:45:41 crc kubenswrapper[4791]: I1208 22:45:41.251209 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2725l_e9d59a9f-d637-407a-a0f9-ae4f4e765b14/reloader/0.log" Dec 08 22:45:41 crc kubenswrapper[4791]: I1208 22:45:41.534922 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-kf5ks_e25fb151-fa5d-4cbe-804c-6078095f6d70/frr-k8s-webhook-server/0.log" Dec 08 22:45:41 crc kubenswrapper[4791]: I1208 22:45:41.726128 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-7c8c9ff997-ltwq7_7e0400bf-de7f-4e28-bcd5-4602c8fe0724/manager/0.log" Dec 08 22:45:42 crc kubenswrapper[4791]: I1208 22:45:42.008010 4791 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_metallb-operator-webhook-server-5998c6b5bd-7wllz_c496942f-e4dd-42ac-b2ce-d6deb5c58e4e/webhook-server/0.log" Dec 08 22:45:42 crc kubenswrapper[4791]: I1208 22:45:42.116025 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2725l_e9d59a9f-d637-407a-a0f9-ae4f4e765b14/frr/0.log" Dec 08 22:45:42 crc kubenswrapper[4791]: I1208 22:45:42.847660 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-7zhfz_6cf2cc89-0735-40c9-bdee-13e18de0a9ea/kube-rbac-proxy/0.log" Dec 08 22:45:43 crc kubenswrapper[4791]: I1208 22:45:43.262067 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-7zhfz_6cf2cc89-0735-40c9-bdee-13e18de0a9ea/speaker/0.log" Dec 08 22:45:43 crc kubenswrapper[4791]: I1208 22:45:43.606367 4791 scope.go:117] "RemoveContainer" containerID="70973cf87c58a479cd2e1f6477c1a3ec7af9883a93b55c6f4884d75fee584abd" Dec 08 22:45:43 crc kubenswrapper[4791]: E1208 22:45:43.606914 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:45:51 crc kubenswrapper[4791]: I1208 22:45:51.598059 4791 scope.go:117] "RemoveContainer" containerID="d1d66e6afd76d88feb8ad14a9a90b9e7ce167c449b3742aef02e4f45a1de08c8" Dec 08 22:45:51 crc kubenswrapper[4791]: E1208 22:45:51.598891 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:45:54 crc kubenswrapper[4791]: I1208 22:45:54.598107 4791 scope.go:117] "RemoveContainer" containerID="70973cf87c58a479cd2e1f6477c1a3ec7af9883a93b55c6f4884d75fee584abd" Dec 08 22:45:54 crc kubenswrapper[4791]: E1208 22:45:54.599039 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:45:56 crc kubenswrapper[4791]: I1208 22:45:56.022900 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8xpwb5_a292e97f-d0e0-47ea-8f22-77915dd393f3/util/0.log" Dec 08 22:45:56 crc kubenswrapper[4791]: I1208 22:45:56.255842 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8xpwb5_a292e97f-d0e0-47ea-8f22-77915dd393f3/util/0.log" Dec 08 22:45:56 crc kubenswrapper[4791]: I1208 22:45:56.277877 4791 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8xpwb5_a292e97f-d0e0-47ea-8f22-77915dd393f3/pull/0.log" Dec 08 22:45:56 crc kubenswrapper[4791]: I1208 22:45:56.343444 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8xpwb5_a292e97f-d0e0-47ea-8f22-77915dd393f3/pull/0.log" Dec 08 22:45:56 crc kubenswrapper[4791]: I1208 22:45:56.562791 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8xpwb5_a292e97f-d0e0-47ea-8f22-77915dd393f3/extract/0.log" Dec 08 22:45:56 crc kubenswrapper[4791]: I1208 22:45:56.594128 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8xpwb5_a292e97f-d0e0-47ea-8f22-77915dd393f3/util/0.log" Dec 08 22:45:56 crc kubenswrapper[4791]: I1208 22:45:56.601939 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8xpwb5_a292e97f-d0e0-47ea-8f22-77915dd393f3/pull/0.log" Dec 08 22:45:56 crc kubenswrapper[4791]: I1208 22:45:56.746476 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7gmjf_e8f61217-10a0-4bde-869e-28ad91534e07/util/0.log" Dec 08 22:45:56 crc kubenswrapper[4791]: I1208 22:45:56.944791 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7gmjf_e8f61217-10a0-4bde-869e-28ad91534e07/util/0.log" Dec 08 22:45:56 crc kubenswrapper[4791]: I1208 22:45:56.960632 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7gmjf_e8f61217-10a0-4bde-869e-28ad91534e07/pull/0.log" Dec 08 22:45:56 crc kubenswrapper[4791]: I1208 22:45:56.978556 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7gmjf_e8f61217-10a0-4bde-869e-28ad91534e07/pull/0.log" Dec 08 22:45:57 crc kubenswrapper[4791]: I1208 22:45:57.202609 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7gmjf_e8f61217-10a0-4bde-869e-28ad91534e07/util/0.log" Dec 08 22:45:57 crc kubenswrapper[4791]: I1208 22:45:57.212464 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7gmjf_e8f61217-10a0-4bde-869e-28ad91534e07/pull/0.log" Dec 08 22:45:57 crc kubenswrapper[4791]: I1208 22:45:57.223636 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7gmjf_e8f61217-10a0-4bde-869e-28ad91534e07/extract/0.log" Dec 08 22:45:57 crc kubenswrapper[4791]: I1208 22:45:57.398758 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107942d_3af99b44-74f9-417f-9b20-ee09a09fe7d1/util/0.log" Dec 08 22:45:57 crc kubenswrapper[4791]: I1208 22:45:57.558518 4791 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107942d_3af99b44-74f9-417f-9b20-ee09a09fe7d1/pull/0.log" Dec 08 22:45:57 crc kubenswrapper[4791]: I1208 22:45:57.577473 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107942d_3af99b44-74f9-417f-9b20-ee09a09fe7d1/pull/0.log" Dec 08 22:45:57 crc kubenswrapper[4791]: I1208 22:45:57.605265 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107942d_3af99b44-74f9-417f-9b20-ee09a09fe7d1/util/0.log" Dec 08 22:45:57 crc kubenswrapper[4791]: I1208 22:45:57.769947 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107942d_3af99b44-74f9-417f-9b20-ee09a09fe7d1/pull/0.log" Dec 08 22:45:57 crc kubenswrapper[4791]: I1208 22:45:57.822841 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107942d_3af99b44-74f9-417f-9b20-ee09a09fe7d1/util/0.log" Dec 08 22:45:57 crc kubenswrapper[4791]: I1208 22:45:57.960660 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107942d_3af99b44-74f9-417f-9b20-ee09a09fe7d1/extract/0.log" Dec 08 22:45:58 crc kubenswrapper[4791]: I1208 22:45:58.038487 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f5zqd6_0bb70a12-b198-4377-906b-8036ff49d91c/util/0.log" Dec 08 22:45:58 crc kubenswrapper[4791]: I1208 22:45:58.206217 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f5zqd6_0bb70a12-b198-4377-906b-8036ff49d91c/util/0.log" Dec 08 22:45:58 crc kubenswrapper[4791]: I1208 22:45:58.222669 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f5zqd6_0bb70a12-b198-4377-906b-8036ff49d91c/pull/0.log" Dec 08 22:45:58 crc kubenswrapper[4791]: I1208 22:45:58.259561 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f5zqd6_0bb70a12-b198-4377-906b-8036ff49d91c/pull/0.log" Dec 08 22:45:58 crc kubenswrapper[4791]: I1208 22:45:58.461722 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f5zqd6_0bb70a12-b198-4377-906b-8036ff49d91c/extract/0.log" Dec 08 22:45:58 crc kubenswrapper[4791]: I1208 22:45:58.463655 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f5zqd6_0bb70a12-b198-4377-906b-8036ff49d91c/pull/0.log" Dec 08 22:45:58 crc kubenswrapper[4791]: I1208 22:45:58.479413 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f5zqd6_0bb70a12-b198-4377-906b-8036ff49d91c/util/0.log" Dec 08 22:45:58 crc kubenswrapper[4791]: I1208 22:45:58.700659 4791 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8372w4n_ccbca2a1-3575-4209-bf34-d3812ad3bc1a/util/0.log" Dec 08 22:45:58 crc kubenswrapper[4791]: I1208 22:45:58.916405 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8372w4n_ccbca2a1-3575-4209-bf34-d3812ad3bc1a/util/0.log" Dec 08 22:45:58 crc kubenswrapper[4791]: I1208 22:45:58.921229 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8372w4n_ccbca2a1-3575-4209-bf34-d3812ad3bc1a/pull/0.log" Dec 08 22:45:58 crc kubenswrapper[4791]: I1208 22:45:58.926604 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8372w4n_ccbca2a1-3575-4209-bf34-d3812ad3bc1a/pull/0.log" Dec 08 22:45:59 crc kubenswrapper[4791]: I1208 22:45:59.151223 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8372w4n_ccbca2a1-3575-4209-bf34-d3812ad3bc1a/util/0.log" Dec 08 22:45:59 crc kubenswrapper[4791]: I1208 22:45:59.155381 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8372w4n_ccbca2a1-3575-4209-bf34-d3812ad3bc1a/pull/0.log" Dec 08 22:45:59 crc kubenswrapper[4791]: I1208 22:45:59.192488 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8372w4n_ccbca2a1-3575-4209-bf34-d3812ad3bc1a/extract/0.log" Dec 08 22:45:59 crc kubenswrapper[4791]: I1208 22:45:59.333869 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-c6rk6_ce8d3d3b-2b87-4d1b-83b8-000beccecf40/extract-utilities/0.log" Dec 08 22:45:59 crc kubenswrapper[4791]: I1208 22:45:59.507189 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-c6rk6_ce8d3d3b-2b87-4d1b-83b8-000beccecf40/extract-content/0.log" Dec 08 22:45:59 crc kubenswrapper[4791]: I1208 22:45:59.522628 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-c6rk6_ce8d3d3b-2b87-4d1b-83b8-000beccecf40/extract-utilities/0.log" Dec 08 22:45:59 crc kubenswrapper[4791]: I1208 22:45:59.530060 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-c6rk6_ce8d3d3b-2b87-4d1b-83b8-000beccecf40/extract-content/0.log" Dec 08 22:45:59 crc kubenswrapper[4791]: I1208 22:45:59.719446 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-c6rk6_ce8d3d3b-2b87-4d1b-83b8-000beccecf40/extract-utilities/0.log" Dec 08 22:45:59 crc kubenswrapper[4791]: I1208 22:45:59.778330 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-c6rk6_ce8d3d3b-2b87-4d1b-83b8-000beccecf40/extract-content/0.log" Dec 08 22:46:00 crc kubenswrapper[4791]: I1208 22:46:00.031889 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-l7wkb_f60fb0b8-f530-4722-aacb-2d39fcf03ee2/extract-utilities/0.log" Dec 08 22:46:00 crc kubenswrapper[4791]: I1208 22:46:00.204270 4791 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_certified-operators-c6rk6_ce8d3d3b-2b87-4d1b-83b8-000beccecf40/registry-server/0.log" Dec 08 22:46:00 crc kubenswrapper[4791]: I1208 22:46:00.275815 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-l7wkb_f60fb0b8-f530-4722-aacb-2d39fcf03ee2/extract-content/0.log" Dec 08 22:46:00 crc kubenswrapper[4791]: I1208 22:46:00.305869 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-l7wkb_f60fb0b8-f530-4722-aacb-2d39fcf03ee2/extract-utilities/0.log" Dec 08 22:46:00 crc kubenswrapper[4791]: I1208 22:46:00.323075 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-l7wkb_f60fb0b8-f530-4722-aacb-2d39fcf03ee2/extract-content/0.log" Dec 08 22:46:00 crc kubenswrapper[4791]: I1208 22:46:00.512926 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-l7wkb_f60fb0b8-f530-4722-aacb-2d39fcf03ee2/extract-content/0.log" Dec 08 22:46:00 crc kubenswrapper[4791]: I1208 22:46:00.542997 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-l7wkb_f60fb0b8-f530-4722-aacb-2d39fcf03ee2/extract-utilities/0.log" Dec 08 22:46:00 crc kubenswrapper[4791]: I1208 22:46:00.799310 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-5qfdd_9fc25b7b-1c62-4d70-b75d-c2f5a82b60fe/marketplace-operator/0.log" Dec 08 22:46:00 crc kubenswrapper[4791]: I1208 22:46:00.831691 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-sqxkn_441a316f-5101-41ba-ac80-0065189657da/extract-utilities/0.log" Dec 08 22:46:01 crc kubenswrapper[4791]: I1208 22:46:01.519521 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-l7wkb_f60fb0b8-f530-4722-aacb-2d39fcf03ee2/registry-server/0.log" Dec 08 22:46:01 crc kubenswrapper[4791]: I1208 22:46:01.710205 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-sqxkn_441a316f-5101-41ba-ac80-0065189657da/extract-content/0.log" Dec 08 22:46:01 crc kubenswrapper[4791]: I1208 22:46:01.734899 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-sqxkn_441a316f-5101-41ba-ac80-0065189657da/extract-utilities/0.log" Dec 08 22:46:01 crc kubenswrapper[4791]: I1208 22:46:01.735158 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-sqxkn_441a316f-5101-41ba-ac80-0065189657da/extract-content/0.log" Dec 08 22:46:01 crc kubenswrapper[4791]: I1208 22:46:01.940253 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-sqxkn_441a316f-5101-41ba-ac80-0065189657da/extract-utilities/0.log" Dec 08 22:46:01 crc kubenswrapper[4791]: I1208 22:46:01.994498 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-578fb_46942f67-6d8f-4500-80c6-81c8d07c6fe5/extract-utilities/0.log" Dec 08 22:46:02 crc kubenswrapper[4791]: I1208 22:46:02.087114 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-sqxkn_441a316f-5101-41ba-ac80-0065189657da/extract-content/0.log" Dec 08 22:46:02 crc kubenswrapper[4791]: I1208 22:46:02.220797 4791 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-marketplace-sqxkn_441a316f-5101-41ba-ac80-0065189657da/registry-server/0.log" Dec 08 22:46:02 crc kubenswrapper[4791]: I1208 22:46:02.292740 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-578fb_46942f67-6d8f-4500-80c6-81c8d07c6fe5/extract-content/0.log" Dec 08 22:46:02 crc kubenswrapper[4791]: I1208 22:46:02.327619 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-578fb_46942f67-6d8f-4500-80c6-81c8d07c6fe5/extract-content/0.log" Dec 08 22:46:02 crc kubenswrapper[4791]: I1208 22:46:02.345943 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-578fb_46942f67-6d8f-4500-80c6-81c8d07c6fe5/extract-utilities/0.log" Dec 08 22:46:02 crc kubenswrapper[4791]: I1208 22:46:02.474606 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-578fb_46942f67-6d8f-4500-80c6-81c8d07c6fe5/extract-utilities/0.log" Dec 08 22:46:02 crc kubenswrapper[4791]: I1208 22:46:02.524361 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-578fb_46942f67-6d8f-4500-80c6-81c8d07c6fe5/extract-content/0.log" Dec 08 22:46:03 crc kubenswrapper[4791]: I1208 22:46:03.255622 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-578fb_46942f67-6d8f-4500-80c6-81c8d07c6fe5/registry-server/0.log" Dec 08 22:46:05 crc kubenswrapper[4791]: I1208 22:46:05.597990 4791 scope.go:117] "RemoveContainer" containerID="70973cf87c58a479cd2e1f6477c1a3ec7af9883a93b55c6f4884d75fee584abd" Dec 08 22:46:05 crc kubenswrapper[4791]: E1208 22:46:05.598613 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:46:06 crc kubenswrapper[4791]: I1208 22:46:06.598361 4791 scope.go:117] "RemoveContainer" containerID="d1d66e6afd76d88feb8ad14a9a90b9e7ce167c449b3742aef02e4f45a1de08c8" Dec 08 22:46:06 crc kubenswrapper[4791]: E1208 22:46:06.598733 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:46:17 crc kubenswrapper[4791]: I1208 22:46:17.155585 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-668cf9dfbb-8ckfk_5bf6b4bb-0cd5-4461-b351-def18dd64e8c/prometheus-operator/0.log" Dec 08 22:46:17 crc kubenswrapper[4791]: I1208 22:46:17.321481 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-7cd7857c4f-5pfgk_c2830318-9f0e-4406-a86b-0622bd55b65b/prometheus-operator-admission-webhook/0.log" Dec 08 22:46:17 crc kubenswrapper[4791]: I1208 22:46:17.371175 4791 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-7cd7857c4f-w777c_df480993-0603-450c-9cec-1e3f5472e67a/prometheus-operator-admission-webhook/0.log" Dec 08 22:46:17 crc kubenswrapper[4791]: I1208 22:46:17.583650 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-d8bb48f5d-m6mjp_e908ab9f-9726-406c-afae-77d716c404e3/operator/0.log" Dec 08 22:46:17 crc kubenswrapper[4791]: I1208 22:46:17.639895 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5446b9c989-cb4nk_33a18e1e-100a-4419-9119-7de245332906/perses-operator/0.log" Dec 08 22:46:18 crc kubenswrapper[4791]: I1208 22:46:18.598181 4791 scope.go:117] "RemoveContainer" containerID="70973cf87c58a479cd2e1f6477c1a3ec7af9883a93b55c6f4884d75fee584abd" Dec 08 22:46:18 crc kubenswrapper[4791]: E1208 22:46:18.598755 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:46:21 crc kubenswrapper[4791]: I1208 22:46:21.598511 4791 scope.go:117] "RemoveContainer" containerID="d1d66e6afd76d88feb8ad14a9a90b9e7ce167c449b3742aef02e4f45a1de08c8" Dec 08 22:46:21 crc kubenswrapper[4791]: E1208 22:46:21.599414 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:46:30 crc kubenswrapper[4791]: I1208 22:46:30.599109 4791 scope.go:117] "RemoveContainer" containerID="70973cf87c58a479cd2e1f6477c1a3ec7af9883a93b55c6f4884d75fee584abd" Dec 08 22:46:30 crc kubenswrapper[4791]: E1208 22:46:30.599896 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:46:32 crc kubenswrapper[4791]: I1208 22:46:32.435368 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-7b4df4946c-d67zd_0c11badb-ae6f-4efe-9d82-80545108b777/manager/0.log" Dec 08 22:46:32 crc kubenswrapper[4791]: I1208 22:46:32.444027 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-7b4df4946c-d67zd_0c11badb-ae6f-4efe-9d82-80545108b777/kube-rbac-proxy/0.log" Dec 08 22:46:33 crc kubenswrapper[4791]: I1208 22:46:33.605722 4791 scope.go:117] "RemoveContainer" containerID="d1d66e6afd76d88feb8ad14a9a90b9e7ce167c449b3742aef02e4f45a1de08c8" Dec 08 22:46:33 crc kubenswrapper[4791]: E1208 22:46:33.606134 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b" Dec 08 22:46:45 crc kubenswrapper[4791]: I1208 22:46:45.597559 4791 scope.go:117] "RemoveContainer" containerID="70973cf87c58a479cd2e1f6477c1a3ec7af9883a93b55c6f4884d75fee584abd" Dec 08 22:46:45 crc kubenswrapper[4791]: E1208 22:46:45.598526 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:46:48 crc kubenswrapper[4791]: I1208 22:46:48.599059 4791 scope.go:117] "RemoveContainer" containerID="d1d66e6afd76d88feb8ad14a9a90b9e7ce167c449b3742aef02e4f45a1de08c8" Dec 08 22:46:48 crc kubenswrapper[4791]: I1208 22:46:48.990794 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" event={"ID":"bcd8d669-4a40-401d-af99-651b840fb48b","Type":"ContainerStarted","Data":"6382d06121b6c1ed820ae522a060a16c139c725e2fbc42627a8157cff474406b"} Dec 08 22:46:48 crc kubenswrapper[4791]: I1208 22:46:48.991529 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 22:46:53 crc kubenswrapper[4791]: E1208 22:46:53.985448 4791 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.173:57466->38.102.83.173:46209: write tcp 38.102.83.173:57466->38.102.83.173:46209: write: broken pipe Dec 08 22:46:55 crc kubenswrapper[4791]: I1208 22:46:55.889312 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" Dec 08 22:47:00 crc kubenswrapper[4791]: I1208 22:47:00.598485 4791 scope.go:117] "RemoveContainer" containerID="70973cf87c58a479cd2e1f6477c1a3ec7af9883a93b55c6f4884d75fee584abd" Dec 08 22:47:00 crc kubenswrapper[4791]: E1208 22:47:00.599476 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:47:15 crc kubenswrapper[4791]: I1208 22:47:15.598595 4791 scope.go:117] "RemoveContainer" containerID="70973cf87c58a479cd2e1f6477c1a3ec7af9883a93b55c6f4884d75fee584abd" Dec 08 22:47:15 crc kubenswrapper[4791]: E1208 22:47:15.599473 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" 
Dec 08 22:47:27 crc kubenswrapper[4791]: I1208 22:47:27.597835 4791 scope.go:117] "RemoveContainer" containerID="70973cf87c58a479cd2e1f6477c1a3ec7af9883a93b55c6f4884d75fee584abd" Dec 08 22:47:27 crc kubenswrapper[4791]: E1208 22:47:27.598658 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:47:39 crc kubenswrapper[4791]: I1208 22:47:39.598326 4791 scope.go:117] "RemoveContainer" containerID="70973cf87c58a479cd2e1f6477c1a3ec7af9883a93b55c6f4884d75fee584abd" Dec 08 22:47:39 crc kubenswrapper[4791]: E1208 22:47:39.598898 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:47:54 crc kubenswrapper[4791]: I1208 22:47:54.598348 4791 scope.go:117] "RemoveContainer" containerID="70973cf87c58a479cd2e1f6477c1a3ec7af9883a93b55c6f4884d75fee584abd" Dec 08 22:47:54 crc kubenswrapper[4791]: E1208 22:47:54.599080 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgd9w_openshift-machine-config-operator(6cdfecf8-95cf-4c2b-b98f-eb7bb055771d)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" Dec 08 22:48:06 crc kubenswrapper[4791]: I1208 22:48:06.789175 4791 generic.go:334] "Generic (PLEG): container finished" podID="c125dfed-a0fd-4181-8461-d66b840ec3e1" containerID="02948ec0830ee3a8edb2319135a2b32db29ed2686aebd5d35f412cb017026712" exitCode=0 Dec 08 22:48:06 crc kubenswrapper[4791]: I1208 22:48:06.789274 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-2s4jt/must-gather-rwxfn" event={"ID":"c125dfed-a0fd-4181-8461-d66b840ec3e1","Type":"ContainerDied","Data":"02948ec0830ee3a8edb2319135a2b32db29ed2686aebd5d35f412cb017026712"} Dec 08 22:48:06 crc kubenswrapper[4791]: I1208 22:48:06.790413 4791 scope.go:117] "RemoveContainer" containerID="02948ec0830ee3a8edb2319135a2b32db29ed2686aebd5d35f412cb017026712" Dec 08 22:48:06 crc kubenswrapper[4791]: I1208 22:48:06.884698 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-2s4jt_must-gather-rwxfn_c125dfed-a0fd-4181-8461-d66b840ec3e1/gather/0.log" Dec 08 22:48:09 crc kubenswrapper[4791]: I1208 22:48:09.206537 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-vztwg"] Dec 08 22:48:09 crc kubenswrapper[4791]: E1208 22:48:09.207569 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85cfa0ae-b55a-4d76-ae24-555e376ec6c1" containerName="collect-profiles" Dec 08 22:48:09 crc kubenswrapper[4791]: I1208 22:48:09.207582 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="85cfa0ae-b55a-4d76-ae24-555e376ec6c1" 
containerName="collect-profiles" Dec 08 22:48:09 crc kubenswrapper[4791]: I1208 22:48:09.207890 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="85cfa0ae-b55a-4d76-ae24-555e376ec6c1" containerName="collect-profiles" Dec 08 22:48:09 crc kubenswrapper[4791]: I1208 22:48:09.209648 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vztwg" Dec 08 22:48:09 crc kubenswrapper[4791]: I1208 22:48:09.223479 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vztwg"] Dec 08 22:48:09 crc kubenswrapper[4791]: I1208 22:48:09.253939 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7b4wq\" (UniqueName: \"kubernetes.io/projected/c1c438be-02be-49eb-bd2e-7b608033aa79-kube-api-access-7b4wq\") pod \"redhat-marketplace-vztwg\" (UID: \"c1c438be-02be-49eb-bd2e-7b608033aa79\") " pod="openshift-marketplace/redhat-marketplace-vztwg" Dec 08 22:48:09 crc kubenswrapper[4791]: I1208 22:48:09.254412 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1c438be-02be-49eb-bd2e-7b608033aa79-utilities\") pod \"redhat-marketplace-vztwg\" (UID: \"c1c438be-02be-49eb-bd2e-7b608033aa79\") " pod="openshift-marketplace/redhat-marketplace-vztwg" Dec 08 22:48:09 crc kubenswrapper[4791]: I1208 22:48:09.254937 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1c438be-02be-49eb-bd2e-7b608033aa79-catalog-content\") pod \"redhat-marketplace-vztwg\" (UID: \"c1c438be-02be-49eb-bd2e-7b608033aa79\") " pod="openshift-marketplace/redhat-marketplace-vztwg" Dec 08 22:48:09 crc kubenswrapper[4791]: I1208 22:48:09.357093 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1c438be-02be-49eb-bd2e-7b608033aa79-catalog-content\") pod \"redhat-marketplace-vztwg\" (UID: \"c1c438be-02be-49eb-bd2e-7b608033aa79\") " pod="openshift-marketplace/redhat-marketplace-vztwg" Dec 08 22:48:09 crc kubenswrapper[4791]: I1208 22:48:09.357203 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7b4wq\" (UniqueName: \"kubernetes.io/projected/c1c438be-02be-49eb-bd2e-7b608033aa79-kube-api-access-7b4wq\") pod \"redhat-marketplace-vztwg\" (UID: \"c1c438be-02be-49eb-bd2e-7b608033aa79\") " pod="openshift-marketplace/redhat-marketplace-vztwg" Dec 08 22:48:09 crc kubenswrapper[4791]: I1208 22:48:09.357322 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1c438be-02be-49eb-bd2e-7b608033aa79-utilities\") pod \"redhat-marketplace-vztwg\" (UID: \"c1c438be-02be-49eb-bd2e-7b608033aa79\") " pod="openshift-marketplace/redhat-marketplace-vztwg" Dec 08 22:48:09 crc kubenswrapper[4791]: I1208 22:48:09.357777 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1c438be-02be-49eb-bd2e-7b608033aa79-catalog-content\") pod \"redhat-marketplace-vztwg\" (UID: \"c1c438be-02be-49eb-bd2e-7b608033aa79\") " pod="openshift-marketplace/redhat-marketplace-vztwg" Dec 08 22:48:09 crc kubenswrapper[4791]: I1208 22:48:09.357825 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1c438be-02be-49eb-bd2e-7b608033aa79-utilities\") pod \"redhat-marketplace-vztwg\" (UID: \"c1c438be-02be-49eb-bd2e-7b608033aa79\") " pod="openshift-marketplace/redhat-marketplace-vztwg" Dec 08 22:48:09 crc kubenswrapper[4791]: I1208 22:48:09.375847 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7b4wq\" (UniqueName: \"kubernetes.io/projected/c1c438be-02be-49eb-bd2e-7b608033aa79-kube-api-access-7b4wq\") pod \"redhat-marketplace-vztwg\" (UID: \"c1c438be-02be-49eb-bd2e-7b608033aa79\") " pod="openshift-marketplace/redhat-marketplace-vztwg" Dec 08 22:48:09 crc kubenswrapper[4791]: I1208 22:48:09.531835 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vztwg" Dec 08 22:48:09 crc kubenswrapper[4791]: I1208 22:48:09.599903 4791 scope.go:117] "RemoveContainer" containerID="70973cf87c58a479cd2e1f6477c1a3ec7af9883a93b55c6f4884d75fee584abd" Dec 08 22:48:10 crc kubenswrapper[4791]: I1208 22:48:10.025052 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vztwg"] Dec 08 22:48:10 crc kubenswrapper[4791]: W1208 22:48:10.036762 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc1c438be_02be_49eb_bd2e_7b608033aa79.slice/crio-3a23cd39cafb825e4662bf05632b142432ecf48bab629d74a8f9b68cf4d12a60 WatchSource:0}: Error finding container 3a23cd39cafb825e4662bf05632b142432ecf48bab629d74a8f9b68cf4d12a60: Status 404 returned error can't find the container with id 3a23cd39cafb825e4662bf05632b142432ecf48bab629d74a8f9b68cf4d12a60 Dec 08 22:48:10 crc kubenswrapper[4791]: I1208 22:48:10.855436 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vztwg" event={"ID":"c1c438be-02be-49eb-bd2e-7b608033aa79","Type":"ContainerStarted","Data":"40ce2266c2b862cfa55f9c3bed9e1a59294ef6278d0c32ef9ae9f38af05a8765"} Dec 08 22:48:10 crc kubenswrapper[4791]: I1208 22:48:10.855897 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vztwg" event={"ID":"c1c438be-02be-49eb-bd2e-7b608033aa79","Type":"ContainerStarted","Data":"3a23cd39cafb825e4662bf05632b142432ecf48bab629d74a8f9b68cf4d12a60"} Dec 08 22:48:10 crc kubenswrapper[4791]: I1208 22:48:10.860683 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" event={"ID":"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d","Type":"ContainerStarted","Data":"e4626798fa568d6a8d475120c115904a112d03ed75df80135b37cb942a2f92c8"} Dec 08 22:48:11 crc kubenswrapper[4791]: I1208 22:48:11.871429 4791 generic.go:334] "Generic (PLEG): container finished" podID="c1c438be-02be-49eb-bd2e-7b608033aa79" containerID="40ce2266c2b862cfa55f9c3bed9e1a59294ef6278d0c32ef9ae9f38af05a8765" exitCode=0 Dec 08 22:48:11 crc kubenswrapper[4791]: I1208 22:48:11.871524 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vztwg" event={"ID":"c1c438be-02be-49eb-bd2e-7b608033aa79","Type":"ContainerDied","Data":"40ce2266c2b862cfa55f9c3bed9e1a59294ef6278d0c32ef9ae9f38af05a8765"} Dec 08 22:48:13 crc kubenswrapper[4791]: I1208 22:48:13.893368 4791 generic.go:334] "Generic (PLEG): container finished" podID="c1c438be-02be-49eb-bd2e-7b608033aa79" containerID="48908859734fc7d8dd2e747e6129264fd11ca8369ff9b4c96ff71aa737cd233c" exitCode=0 Dec 08 22:48:13 
crc kubenswrapper[4791]: I1208 22:48:13.893453 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vztwg" event={"ID":"c1c438be-02be-49eb-bd2e-7b608033aa79","Type":"ContainerDied","Data":"48908859734fc7d8dd2e747e6129264fd11ca8369ff9b4c96ff71aa737cd233c"} Dec 08 22:48:14 crc kubenswrapper[4791]: I1208 22:48:14.905309 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vztwg" event={"ID":"c1c438be-02be-49eb-bd2e-7b608033aa79","Type":"ContainerStarted","Data":"49a0d863bb4eb7e062aa8039e541e631caf2c223409c834219207b6b9b2c8541"} Dec 08 22:48:14 crc kubenswrapper[4791]: I1208 22:48:14.925235 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-vztwg" podStartSLOduration=3.464515351 podStartE2EDuration="5.925214029s" podCreationTimestamp="2025-12-08 22:48:09 +0000 UTC" firstStartedPulling="2025-12-08 22:48:11.874031933 +0000 UTC m=+5368.572790268" lastFinishedPulling="2025-12-08 22:48:14.334730601 +0000 UTC m=+5371.033488946" observedRunningTime="2025-12-08 22:48:14.922281555 +0000 UTC m=+5371.621039900" watchObservedRunningTime="2025-12-08 22:48:14.925214029 +0000 UTC m=+5371.623972374" Dec 08 22:48:15 crc kubenswrapper[4791]: I1208 22:48:15.304470 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-2s4jt/must-gather-rwxfn"] Dec 08 22:48:15 crc kubenswrapper[4791]: I1208 22:48:15.304738 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-2s4jt/must-gather-rwxfn" podUID="c125dfed-a0fd-4181-8461-d66b840ec3e1" containerName="copy" containerID="cri-o://6278133db0f824e5ef87fdcd0582d838099f9aed192c976b01165be57f7d5949" gracePeriod=2 Dec 08 22:48:15 crc kubenswrapper[4791]: I1208 22:48:15.310359 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-2s4jt/must-gather-rwxfn"] Dec 08 22:48:15 crc kubenswrapper[4791]: I1208 22:48:15.907315 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-2s4jt_must-gather-rwxfn_c125dfed-a0fd-4181-8461-d66b840ec3e1/copy/0.log" Dec 08 22:48:15 crc kubenswrapper[4791]: I1208 22:48:15.911639 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-2s4jt/must-gather-rwxfn" Dec 08 22:48:15 crc kubenswrapper[4791]: I1208 22:48:15.918364 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-2s4jt_must-gather-rwxfn_c125dfed-a0fd-4181-8461-d66b840ec3e1/copy/0.log" Dec 08 22:48:15 crc kubenswrapper[4791]: I1208 22:48:15.918848 4791 generic.go:334] "Generic (PLEG): container finished" podID="c125dfed-a0fd-4181-8461-d66b840ec3e1" containerID="6278133db0f824e5ef87fdcd0582d838099f9aed192c976b01165be57f7d5949" exitCode=143 Dec 08 22:48:15 crc kubenswrapper[4791]: I1208 22:48:15.918934 4791 scope.go:117] "RemoveContainer" containerID="6278133db0f824e5ef87fdcd0582d838099f9aed192c976b01165be57f7d5949" Dec 08 22:48:15 crc kubenswrapper[4791]: I1208 22:48:15.919035 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-2s4jt/must-gather-rwxfn" Dec 08 22:48:15 crc kubenswrapper[4791]: I1208 22:48:15.950889 4791 scope.go:117] "RemoveContainer" containerID="02948ec0830ee3a8edb2319135a2b32db29ed2686aebd5d35f412cb017026712" Dec 08 22:48:15 crc kubenswrapper[4791]: I1208 22:48:15.956687 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k9qc4\" (UniqueName: \"kubernetes.io/projected/c125dfed-a0fd-4181-8461-d66b840ec3e1-kube-api-access-k9qc4\") pod \"c125dfed-a0fd-4181-8461-d66b840ec3e1\" (UID: \"c125dfed-a0fd-4181-8461-d66b840ec3e1\") " Dec 08 22:48:15 crc kubenswrapper[4791]: I1208 22:48:15.956760 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/c125dfed-a0fd-4181-8461-d66b840ec3e1-must-gather-output\") pod \"c125dfed-a0fd-4181-8461-d66b840ec3e1\" (UID: \"c125dfed-a0fd-4181-8461-d66b840ec3e1\") " Dec 08 22:48:16 crc kubenswrapper[4791]: I1208 22:48:16.000320 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c125dfed-a0fd-4181-8461-d66b840ec3e1-kube-api-access-k9qc4" (OuterVolumeSpecName: "kube-api-access-k9qc4") pod "c125dfed-a0fd-4181-8461-d66b840ec3e1" (UID: "c125dfed-a0fd-4181-8461-d66b840ec3e1"). InnerVolumeSpecName "kube-api-access-k9qc4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 22:48:16 crc kubenswrapper[4791]: I1208 22:48:16.065487 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k9qc4\" (UniqueName: \"kubernetes.io/projected/c125dfed-a0fd-4181-8461-d66b840ec3e1-kube-api-access-k9qc4\") on node \"crc\" DevicePath \"\"" Dec 08 22:48:16 crc kubenswrapper[4791]: I1208 22:48:16.132586 4791 scope.go:117] "RemoveContainer" containerID="6278133db0f824e5ef87fdcd0582d838099f9aed192c976b01165be57f7d5949" Dec 08 22:48:16 crc kubenswrapper[4791]: E1208 22:48:16.133419 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6278133db0f824e5ef87fdcd0582d838099f9aed192c976b01165be57f7d5949\": container with ID starting with 6278133db0f824e5ef87fdcd0582d838099f9aed192c976b01165be57f7d5949 not found: ID does not exist" containerID="6278133db0f824e5ef87fdcd0582d838099f9aed192c976b01165be57f7d5949" Dec 08 22:48:16 crc kubenswrapper[4791]: I1208 22:48:16.133461 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6278133db0f824e5ef87fdcd0582d838099f9aed192c976b01165be57f7d5949"} err="failed to get container status \"6278133db0f824e5ef87fdcd0582d838099f9aed192c976b01165be57f7d5949\": rpc error: code = NotFound desc = could not find container \"6278133db0f824e5ef87fdcd0582d838099f9aed192c976b01165be57f7d5949\": container with ID starting with 6278133db0f824e5ef87fdcd0582d838099f9aed192c976b01165be57f7d5949 not found: ID does not exist" Dec 08 22:48:16 crc kubenswrapper[4791]: I1208 22:48:16.133488 4791 scope.go:117] "RemoveContainer" containerID="02948ec0830ee3a8edb2319135a2b32db29ed2686aebd5d35f412cb017026712" Dec 08 22:48:16 crc kubenswrapper[4791]: E1208 22:48:16.133728 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"02948ec0830ee3a8edb2319135a2b32db29ed2686aebd5d35f412cb017026712\": container with ID starting with 02948ec0830ee3a8edb2319135a2b32db29ed2686aebd5d35f412cb017026712 not found: ID does not exist" 
containerID="02948ec0830ee3a8edb2319135a2b32db29ed2686aebd5d35f412cb017026712" Dec 08 22:48:16 crc kubenswrapper[4791]: I1208 22:48:16.133758 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"02948ec0830ee3a8edb2319135a2b32db29ed2686aebd5d35f412cb017026712"} err="failed to get container status \"02948ec0830ee3a8edb2319135a2b32db29ed2686aebd5d35f412cb017026712\": rpc error: code = NotFound desc = could not find container \"02948ec0830ee3a8edb2319135a2b32db29ed2686aebd5d35f412cb017026712\": container with ID starting with 02948ec0830ee3a8edb2319135a2b32db29ed2686aebd5d35f412cb017026712 not found: ID does not exist" Dec 08 22:48:16 crc kubenswrapper[4791]: I1208 22:48:16.198613 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-g8bdx"] Dec 08 22:48:16 crc kubenswrapper[4791]: E1208 22:48:16.199773 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c125dfed-a0fd-4181-8461-d66b840ec3e1" containerName="gather" Dec 08 22:48:16 crc kubenswrapper[4791]: I1208 22:48:16.199793 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="c125dfed-a0fd-4181-8461-d66b840ec3e1" containerName="gather" Dec 08 22:48:16 crc kubenswrapper[4791]: E1208 22:48:16.199824 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c125dfed-a0fd-4181-8461-d66b840ec3e1" containerName="copy" Dec 08 22:48:16 crc kubenswrapper[4791]: I1208 22:48:16.199832 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="c125dfed-a0fd-4181-8461-d66b840ec3e1" containerName="copy" Dec 08 22:48:16 crc kubenswrapper[4791]: I1208 22:48:16.201749 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="c125dfed-a0fd-4181-8461-d66b840ec3e1" containerName="copy" Dec 08 22:48:16 crc kubenswrapper[4791]: I1208 22:48:16.201769 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="c125dfed-a0fd-4181-8461-d66b840ec3e1" containerName="gather" Dec 08 22:48:16 crc kubenswrapper[4791]: I1208 22:48:16.203656 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-g8bdx" Dec 08 22:48:16 crc kubenswrapper[4791]: I1208 22:48:16.214005 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-g8bdx"] Dec 08 22:48:16 crc kubenswrapper[4791]: I1208 22:48:16.257822 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c125dfed-a0fd-4181-8461-d66b840ec3e1-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "c125dfed-a0fd-4181-8461-d66b840ec3e1" (UID: "c125dfed-a0fd-4181-8461-d66b840ec3e1"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:48:16 crc kubenswrapper[4791]: I1208 22:48:16.272074 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5xf7\" (UniqueName: \"kubernetes.io/projected/f3f58bdb-1ca2-4933-b5e4-16d275a6139d-kube-api-access-r5xf7\") pod \"redhat-operators-g8bdx\" (UID: \"f3f58bdb-1ca2-4933-b5e4-16d275a6139d\") " pod="openshift-marketplace/redhat-operators-g8bdx" Dec 08 22:48:16 crc kubenswrapper[4791]: I1208 22:48:16.272450 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3f58bdb-1ca2-4933-b5e4-16d275a6139d-utilities\") pod \"redhat-operators-g8bdx\" (UID: \"f3f58bdb-1ca2-4933-b5e4-16d275a6139d\") " pod="openshift-marketplace/redhat-operators-g8bdx" Dec 08 22:48:16 crc kubenswrapper[4791]: I1208 22:48:16.272538 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3f58bdb-1ca2-4933-b5e4-16d275a6139d-catalog-content\") pod \"redhat-operators-g8bdx\" (UID: \"f3f58bdb-1ca2-4933-b5e4-16d275a6139d\") " pod="openshift-marketplace/redhat-operators-g8bdx" Dec 08 22:48:16 crc kubenswrapper[4791]: I1208 22:48:16.272973 4791 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/c125dfed-a0fd-4181-8461-d66b840ec3e1-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 08 22:48:16 crc kubenswrapper[4791]: I1208 22:48:16.374526 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5xf7\" (UniqueName: \"kubernetes.io/projected/f3f58bdb-1ca2-4933-b5e4-16d275a6139d-kube-api-access-r5xf7\") pod \"redhat-operators-g8bdx\" (UID: \"f3f58bdb-1ca2-4933-b5e4-16d275a6139d\") " pod="openshift-marketplace/redhat-operators-g8bdx" Dec 08 22:48:16 crc kubenswrapper[4791]: I1208 22:48:16.374662 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3f58bdb-1ca2-4933-b5e4-16d275a6139d-utilities\") pod \"redhat-operators-g8bdx\" (UID: \"f3f58bdb-1ca2-4933-b5e4-16d275a6139d\") " pod="openshift-marketplace/redhat-operators-g8bdx" Dec 08 22:48:16 crc kubenswrapper[4791]: I1208 22:48:16.374683 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3f58bdb-1ca2-4933-b5e4-16d275a6139d-catalog-content\") pod \"redhat-operators-g8bdx\" (UID: \"f3f58bdb-1ca2-4933-b5e4-16d275a6139d\") " pod="openshift-marketplace/redhat-operators-g8bdx" Dec 08 22:48:16 crc kubenswrapper[4791]: I1208 22:48:16.375184 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3f58bdb-1ca2-4933-b5e4-16d275a6139d-catalog-content\") pod \"redhat-operators-g8bdx\" (UID: \"f3f58bdb-1ca2-4933-b5e4-16d275a6139d\") " pod="openshift-marketplace/redhat-operators-g8bdx" Dec 08 22:48:16 crc kubenswrapper[4791]: I1208 22:48:16.375230 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3f58bdb-1ca2-4933-b5e4-16d275a6139d-utilities\") pod \"redhat-operators-g8bdx\" (UID: \"f3f58bdb-1ca2-4933-b5e4-16d275a6139d\") " pod="openshift-marketplace/redhat-operators-g8bdx" Dec 08 22:48:16 crc kubenswrapper[4791]: I1208 22:48:16.395437 
4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5xf7\" (UniqueName: \"kubernetes.io/projected/f3f58bdb-1ca2-4933-b5e4-16d275a6139d-kube-api-access-r5xf7\") pod \"redhat-operators-g8bdx\" (UID: \"f3f58bdb-1ca2-4933-b5e4-16d275a6139d\") " pod="openshift-marketplace/redhat-operators-g8bdx" Dec 08 22:48:16 crc kubenswrapper[4791]: I1208 22:48:16.541560 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-g8bdx" Dec 08 22:48:17 crc kubenswrapper[4791]: W1208 22:48:17.097839 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf3f58bdb_1ca2_4933_b5e4_16d275a6139d.slice/crio-71c188aaa7d02cc66084df94ae4e27ce498d6646bc74c212272870e08af73a4e WatchSource:0}: Error finding container 71c188aaa7d02cc66084df94ae4e27ce498d6646bc74c212272870e08af73a4e: Status 404 returned error can't find the container with id 71c188aaa7d02cc66084df94ae4e27ce498d6646bc74c212272870e08af73a4e Dec 08 22:48:17 crc kubenswrapper[4791]: I1208 22:48:17.104641 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-g8bdx"] Dec 08 22:48:17 crc kubenswrapper[4791]: I1208 22:48:17.613026 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c125dfed-a0fd-4181-8461-d66b840ec3e1" path="/var/lib/kubelet/pods/c125dfed-a0fd-4181-8461-d66b840ec3e1/volumes" Dec 08 22:48:17 crc kubenswrapper[4791]: I1208 22:48:17.949104 4791 generic.go:334] "Generic (PLEG): container finished" podID="f3f58bdb-1ca2-4933-b5e4-16d275a6139d" containerID="5bd4b77b1f1ee761093d0b87a91b805c0db068a96f6c524294e3bb1c970192ed" exitCode=0 Dec 08 22:48:17 crc kubenswrapper[4791]: I1208 22:48:17.949209 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g8bdx" event={"ID":"f3f58bdb-1ca2-4933-b5e4-16d275a6139d","Type":"ContainerDied","Data":"5bd4b77b1f1ee761093d0b87a91b805c0db068a96f6c524294e3bb1c970192ed"} Dec 08 22:48:17 crc kubenswrapper[4791]: I1208 22:48:17.949405 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g8bdx" event={"ID":"f3f58bdb-1ca2-4933-b5e4-16d275a6139d","Type":"ContainerStarted","Data":"71c188aaa7d02cc66084df94ae4e27ce498d6646bc74c212272870e08af73a4e"} Dec 08 22:48:19 crc kubenswrapper[4791]: I1208 22:48:19.532466 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-vztwg" Dec 08 22:48:19 crc kubenswrapper[4791]: I1208 22:48:19.534216 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-vztwg" Dec 08 22:48:19 crc kubenswrapper[4791]: I1208 22:48:19.593351 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-vztwg" Dec 08 22:48:19 crc kubenswrapper[4791]: I1208 22:48:19.967742 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g8bdx" event={"ID":"f3f58bdb-1ca2-4933-b5e4-16d275a6139d","Type":"ContainerStarted","Data":"f73d3e0044df3965aef1c6d18256e353ca0ee9cc61008e3e62d19367eb1d64e9"} Dec 08 22:48:20 crc kubenswrapper[4791]: I1208 22:48:20.022506 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-vztwg" Dec 08 22:48:21 crc kubenswrapper[4791]: I1208 22:48:21.385700 4791 kubelet.go:2437] "SyncLoop 
DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vztwg"] Dec 08 22:48:21 crc kubenswrapper[4791]: I1208 22:48:21.992908 4791 generic.go:334] "Generic (PLEG): container finished" podID="f3f58bdb-1ca2-4933-b5e4-16d275a6139d" containerID="f73d3e0044df3965aef1c6d18256e353ca0ee9cc61008e3e62d19367eb1d64e9" exitCode=0 Dec 08 22:48:21 crc kubenswrapper[4791]: I1208 22:48:21.992967 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g8bdx" event={"ID":"f3f58bdb-1ca2-4933-b5e4-16d275a6139d","Type":"ContainerDied","Data":"f73d3e0044df3965aef1c6d18256e353ca0ee9cc61008e3e62d19367eb1d64e9"} Dec 08 22:48:21 crc kubenswrapper[4791]: I1208 22:48:21.993442 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-vztwg" podUID="c1c438be-02be-49eb-bd2e-7b608033aa79" containerName="registry-server" containerID="cri-o://49a0d863bb4eb7e062aa8039e541e631caf2c223409c834219207b6b9b2c8541" gracePeriod=2 Dec 08 22:48:23 crc kubenswrapper[4791]: I1208 22:48:23.020329 4791 generic.go:334] "Generic (PLEG): container finished" podID="c1c438be-02be-49eb-bd2e-7b608033aa79" containerID="49a0d863bb4eb7e062aa8039e541e631caf2c223409c834219207b6b9b2c8541" exitCode=0 Dec 08 22:48:23 crc kubenswrapper[4791]: I1208 22:48:23.021432 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vztwg" event={"ID":"c1c438be-02be-49eb-bd2e-7b608033aa79","Type":"ContainerDied","Data":"49a0d863bb4eb7e062aa8039e541e631caf2c223409c834219207b6b9b2c8541"} Dec 08 22:48:23 crc kubenswrapper[4791]: I1208 22:48:23.692378 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vztwg" Dec 08 22:48:23 crc kubenswrapper[4791]: I1208 22:48:23.866039 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1c438be-02be-49eb-bd2e-7b608033aa79-utilities\") pod \"c1c438be-02be-49eb-bd2e-7b608033aa79\" (UID: \"c1c438be-02be-49eb-bd2e-7b608033aa79\") " Dec 08 22:48:23 crc kubenswrapper[4791]: I1208 22:48:23.866402 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7b4wq\" (UniqueName: \"kubernetes.io/projected/c1c438be-02be-49eb-bd2e-7b608033aa79-kube-api-access-7b4wq\") pod \"c1c438be-02be-49eb-bd2e-7b608033aa79\" (UID: \"c1c438be-02be-49eb-bd2e-7b608033aa79\") " Dec 08 22:48:23 crc kubenswrapper[4791]: I1208 22:48:23.866605 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1c438be-02be-49eb-bd2e-7b608033aa79-catalog-content\") pod \"c1c438be-02be-49eb-bd2e-7b608033aa79\" (UID: \"c1c438be-02be-49eb-bd2e-7b608033aa79\") " Dec 08 22:48:23 crc kubenswrapper[4791]: I1208 22:48:23.868248 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c1c438be-02be-49eb-bd2e-7b608033aa79-utilities" (OuterVolumeSpecName: "utilities") pod "c1c438be-02be-49eb-bd2e-7b608033aa79" (UID: "c1c438be-02be-49eb-bd2e-7b608033aa79"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:48:23 crc kubenswrapper[4791]: I1208 22:48:23.882093 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1c438be-02be-49eb-bd2e-7b608033aa79-kube-api-access-7b4wq" (OuterVolumeSpecName: "kube-api-access-7b4wq") pod "c1c438be-02be-49eb-bd2e-7b608033aa79" (UID: "c1c438be-02be-49eb-bd2e-7b608033aa79"). InnerVolumeSpecName "kube-api-access-7b4wq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 22:48:23 crc kubenswrapper[4791]: I1208 22:48:23.900386 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c1c438be-02be-49eb-bd2e-7b608033aa79-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c1c438be-02be-49eb-bd2e-7b608033aa79" (UID: "c1c438be-02be-49eb-bd2e-7b608033aa79"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:48:23 crc kubenswrapper[4791]: I1208 22:48:23.969704 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1c438be-02be-49eb-bd2e-7b608033aa79-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 22:48:23 crc kubenswrapper[4791]: I1208 22:48:23.969755 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1c438be-02be-49eb-bd2e-7b608033aa79-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 22:48:23 crc kubenswrapper[4791]: I1208 22:48:23.969769 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7b4wq\" (UniqueName: \"kubernetes.io/projected/c1c438be-02be-49eb-bd2e-7b608033aa79-kube-api-access-7b4wq\") on node \"crc\" DevicePath \"\"" Dec 08 22:48:24 crc kubenswrapper[4791]: I1208 22:48:24.036309 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g8bdx" event={"ID":"f3f58bdb-1ca2-4933-b5e4-16d275a6139d","Type":"ContainerStarted","Data":"bcca4a38b6ab459b20a927171ab131a62aaf27589e05445007b537941b5671c2"} Dec 08 22:48:24 crc kubenswrapper[4791]: I1208 22:48:24.041946 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vztwg" event={"ID":"c1c438be-02be-49eb-bd2e-7b608033aa79","Type":"ContainerDied","Data":"3a23cd39cafb825e4662bf05632b142432ecf48bab629d74a8f9b68cf4d12a60"} Dec 08 22:48:24 crc kubenswrapper[4791]: I1208 22:48:24.042026 4791 scope.go:117] "RemoveContainer" containerID="49a0d863bb4eb7e062aa8039e541e631caf2c223409c834219207b6b9b2c8541" Dec 08 22:48:24 crc kubenswrapper[4791]: I1208 22:48:24.043429 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vztwg" Dec 08 22:48:24 crc kubenswrapper[4791]: I1208 22:48:24.065276 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-g8bdx" podStartSLOduration=3.039589666 podStartE2EDuration="8.065251298s" podCreationTimestamp="2025-12-08 22:48:16 +0000 UTC" firstStartedPulling="2025-12-08 22:48:17.954195468 +0000 UTC m=+5374.652953813" lastFinishedPulling="2025-12-08 22:48:22.9798571 +0000 UTC m=+5379.678615445" observedRunningTime="2025-12-08 22:48:24.055827502 +0000 UTC m=+5380.754585847" watchObservedRunningTime="2025-12-08 22:48:24.065251298 +0000 UTC m=+5380.764009653" Dec 08 22:48:24 crc kubenswrapper[4791]: I1208 22:48:24.072183 4791 scope.go:117] "RemoveContainer" containerID="48908859734fc7d8dd2e747e6129264fd11ca8369ff9b4c96ff71aa737cd233c" Dec 08 22:48:24 crc kubenswrapper[4791]: I1208 22:48:24.092154 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vztwg"] Dec 08 22:48:24 crc kubenswrapper[4791]: I1208 22:48:24.112279 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-vztwg"] Dec 08 22:48:24 crc kubenswrapper[4791]: I1208 22:48:24.124455 4791 scope.go:117] "RemoveContainer" containerID="40ce2266c2b862cfa55f9c3bed9e1a59294ef6278d0c32ef9ae9f38af05a8765" Dec 08 22:48:25 crc kubenswrapper[4791]: I1208 22:48:25.613127 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c1c438be-02be-49eb-bd2e-7b608033aa79" path="/var/lib/kubelet/pods/c1c438be-02be-49eb-bd2e-7b608033aa79/volumes" Dec 08 22:48:26 crc kubenswrapper[4791]: I1208 22:48:26.541922 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-g8bdx" Dec 08 22:48:26 crc kubenswrapper[4791]: I1208 22:48:26.542541 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-g8bdx" Dec 08 22:48:27 crc kubenswrapper[4791]: I1208 22:48:27.660547 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-g8bdx" podUID="f3f58bdb-1ca2-4933-b5e4-16d275a6139d" containerName="registry-server" probeResult="failure" output=< Dec 08 22:48:27 crc kubenswrapper[4791]: timeout: failed to connect service ":50051" within 1s Dec 08 22:48:27 crc kubenswrapper[4791]: > Dec 08 22:48:36 crc kubenswrapper[4791]: I1208 22:48:36.597428 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-g8bdx" Dec 08 22:48:36 crc kubenswrapper[4791]: I1208 22:48:36.659043 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-g8bdx" Dec 08 22:48:36 crc kubenswrapper[4791]: I1208 22:48:36.846244 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-g8bdx"] Dec 08 22:48:38 crc kubenswrapper[4791]: I1208 22:48:38.193570 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-g8bdx" podUID="f3f58bdb-1ca2-4933-b5e4-16d275a6139d" containerName="registry-server" containerID="cri-o://bcca4a38b6ab459b20a927171ab131a62aaf27589e05445007b537941b5671c2" gracePeriod=2 Dec 08 22:48:39 crc kubenswrapper[4791]: I1208 22:48:39.206092 4791 generic.go:334] "Generic (PLEG): container finished" podID="f3f58bdb-1ca2-4933-b5e4-16d275a6139d" 
containerID="bcca4a38b6ab459b20a927171ab131a62aaf27589e05445007b537941b5671c2" exitCode=0 Dec 08 22:48:39 crc kubenswrapper[4791]: I1208 22:48:39.206204 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g8bdx" event={"ID":"f3f58bdb-1ca2-4933-b5e4-16d275a6139d","Type":"ContainerDied","Data":"bcca4a38b6ab459b20a927171ab131a62aaf27589e05445007b537941b5671c2"} Dec 08 22:48:39 crc kubenswrapper[4791]: I1208 22:48:39.206566 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g8bdx" event={"ID":"f3f58bdb-1ca2-4933-b5e4-16d275a6139d","Type":"ContainerDied","Data":"71c188aaa7d02cc66084df94ae4e27ce498d6646bc74c212272870e08af73a4e"} Dec 08 22:48:39 crc kubenswrapper[4791]: I1208 22:48:39.206589 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="71c188aaa7d02cc66084df94ae4e27ce498d6646bc74c212272870e08af73a4e" Dec 08 22:48:39 crc kubenswrapper[4791]: I1208 22:48:39.917086 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-g8bdx" Dec 08 22:48:39 crc kubenswrapper[4791]: I1208 22:48:39.970947 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3f58bdb-1ca2-4933-b5e4-16d275a6139d-utilities\") pod \"f3f58bdb-1ca2-4933-b5e4-16d275a6139d\" (UID: \"f3f58bdb-1ca2-4933-b5e4-16d275a6139d\") " Dec 08 22:48:39 crc kubenswrapper[4791]: I1208 22:48:39.971225 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r5xf7\" (UniqueName: \"kubernetes.io/projected/f3f58bdb-1ca2-4933-b5e4-16d275a6139d-kube-api-access-r5xf7\") pod \"f3f58bdb-1ca2-4933-b5e4-16d275a6139d\" (UID: \"f3f58bdb-1ca2-4933-b5e4-16d275a6139d\") " Dec 08 22:48:39 crc kubenswrapper[4791]: I1208 22:48:39.971310 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3f58bdb-1ca2-4933-b5e4-16d275a6139d-catalog-content\") pod \"f3f58bdb-1ca2-4933-b5e4-16d275a6139d\" (UID: \"f3f58bdb-1ca2-4933-b5e4-16d275a6139d\") " Dec 08 22:48:39 crc kubenswrapper[4791]: I1208 22:48:39.972387 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f3f58bdb-1ca2-4933-b5e4-16d275a6139d-utilities" (OuterVolumeSpecName: "utilities") pod "f3f58bdb-1ca2-4933-b5e4-16d275a6139d" (UID: "f3f58bdb-1ca2-4933-b5e4-16d275a6139d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:48:39 crc kubenswrapper[4791]: I1208 22:48:39.978202 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3f58bdb-1ca2-4933-b5e4-16d275a6139d-kube-api-access-r5xf7" (OuterVolumeSpecName: "kube-api-access-r5xf7") pod "f3f58bdb-1ca2-4933-b5e4-16d275a6139d" (UID: "f3f58bdb-1ca2-4933-b5e4-16d275a6139d"). InnerVolumeSpecName "kube-api-access-r5xf7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 22:48:40 crc kubenswrapper[4791]: I1208 22:48:40.075236 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r5xf7\" (UniqueName: \"kubernetes.io/projected/f3f58bdb-1ca2-4933-b5e4-16d275a6139d-kube-api-access-r5xf7\") on node \"crc\" DevicePath \"\"" Dec 08 22:48:40 crc kubenswrapper[4791]: I1208 22:48:40.075300 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3f58bdb-1ca2-4933-b5e4-16d275a6139d-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 22:48:40 crc kubenswrapper[4791]: I1208 22:48:40.095965 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f3f58bdb-1ca2-4933-b5e4-16d275a6139d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f3f58bdb-1ca2-4933-b5e4-16d275a6139d" (UID: "f3f58bdb-1ca2-4933-b5e4-16d275a6139d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:48:40 crc kubenswrapper[4791]: I1208 22:48:40.179032 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3f58bdb-1ca2-4933-b5e4-16d275a6139d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 22:48:40 crc kubenswrapper[4791]: I1208 22:48:40.216870 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-g8bdx" Dec 08 22:48:40 crc kubenswrapper[4791]: I1208 22:48:40.260792 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-g8bdx"] Dec 08 22:48:40 crc kubenswrapper[4791]: I1208 22:48:40.269914 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-g8bdx"] Dec 08 22:48:41 crc kubenswrapper[4791]: I1208 22:48:41.609511 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f3f58bdb-1ca2-4933-b5e4-16d275a6139d" path="/var/lib/kubelet/pods/f3f58bdb-1ca2-4933-b5e4-16d275a6139d/volumes" Dec 08 22:48:52 crc kubenswrapper[4791]: I1208 22:48:52.228147 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-ltr8x"] Dec 08 22:48:52 crc kubenswrapper[4791]: E1208 22:48:52.230462 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1c438be-02be-49eb-bd2e-7b608033aa79" containerName="extract-utilities" Dec 08 22:48:52 crc kubenswrapper[4791]: I1208 22:48:52.230570 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1c438be-02be-49eb-bd2e-7b608033aa79" containerName="extract-utilities" Dec 08 22:48:52 crc kubenswrapper[4791]: E1208 22:48:52.230678 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1c438be-02be-49eb-bd2e-7b608033aa79" containerName="registry-server" Dec 08 22:48:52 crc kubenswrapper[4791]: I1208 22:48:52.230761 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1c438be-02be-49eb-bd2e-7b608033aa79" containerName="registry-server" Dec 08 22:48:52 crc kubenswrapper[4791]: E1208 22:48:52.230834 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3f58bdb-1ca2-4933-b5e4-16d275a6139d" containerName="extract-content" Dec 08 22:48:52 crc kubenswrapper[4791]: I1208 22:48:52.230905 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3f58bdb-1ca2-4933-b5e4-16d275a6139d" containerName="extract-content" Dec 08 22:48:52 crc kubenswrapper[4791]: E1208 22:48:52.230977 4791 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="c1c438be-02be-49eb-bd2e-7b608033aa79" containerName="extract-content" Dec 08 22:48:52 crc kubenswrapper[4791]: I1208 22:48:52.231038 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1c438be-02be-49eb-bd2e-7b608033aa79" containerName="extract-content" Dec 08 22:48:52 crc kubenswrapper[4791]: E1208 22:48:52.231105 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3f58bdb-1ca2-4933-b5e4-16d275a6139d" containerName="registry-server" Dec 08 22:48:52 crc kubenswrapper[4791]: I1208 22:48:52.231161 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3f58bdb-1ca2-4933-b5e4-16d275a6139d" containerName="registry-server" Dec 08 22:48:52 crc kubenswrapper[4791]: E1208 22:48:52.231329 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3f58bdb-1ca2-4933-b5e4-16d275a6139d" containerName="extract-utilities" Dec 08 22:48:52 crc kubenswrapper[4791]: I1208 22:48:52.231398 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3f58bdb-1ca2-4933-b5e4-16d275a6139d" containerName="extract-utilities" Dec 08 22:48:52 crc kubenswrapper[4791]: I1208 22:48:52.231788 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3f58bdb-1ca2-4933-b5e4-16d275a6139d" containerName="registry-server" Dec 08 22:48:52 crc kubenswrapper[4791]: I1208 22:48:52.231909 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1c438be-02be-49eb-bd2e-7b608033aa79" containerName="registry-server" Dec 08 22:48:52 crc kubenswrapper[4791]: I1208 22:48:52.233922 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ltr8x" Dec 08 22:48:52 crc kubenswrapper[4791]: I1208 22:48:52.240330 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ltr8x"] Dec 08 22:48:52 crc kubenswrapper[4791]: I1208 22:48:52.256198 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e9de21e6-7c26-469a-92bc-d9227a20fefd-utilities\") pod \"community-operators-ltr8x\" (UID: \"e9de21e6-7c26-469a-92bc-d9227a20fefd\") " pod="openshift-marketplace/community-operators-ltr8x" Dec 08 22:48:52 crc kubenswrapper[4791]: I1208 22:48:52.256558 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e9de21e6-7c26-469a-92bc-d9227a20fefd-catalog-content\") pod \"community-operators-ltr8x\" (UID: \"e9de21e6-7c26-469a-92bc-d9227a20fefd\") " pod="openshift-marketplace/community-operators-ltr8x" Dec 08 22:48:52 crc kubenswrapper[4791]: I1208 22:48:52.256780 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bmmg4\" (UniqueName: \"kubernetes.io/projected/e9de21e6-7c26-469a-92bc-d9227a20fefd-kube-api-access-bmmg4\") pod \"community-operators-ltr8x\" (UID: \"e9de21e6-7c26-469a-92bc-d9227a20fefd\") " pod="openshift-marketplace/community-operators-ltr8x" Dec 08 22:48:52 crc kubenswrapper[4791]: I1208 22:48:52.358459 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bmmg4\" (UniqueName: \"kubernetes.io/projected/e9de21e6-7c26-469a-92bc-d9227a20fefd-kube-api-access-bmmg4\") pod \"community-operators-ltr8x\" (UID: \"e9de21e6-7c26-469a-92bc-d9227a20fefd\") " pod="openshift-marketplace/community-operators-ltr8x" Dec 08 22:48:52 
crc kubenswrapper[4791]: I1208 22:48:52.358626 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e9de21e6-7c26-469a-92bc-d9227a20fefd-utilities\") pod \"community-operators-ltr8x\" (UID: \"e9de21e6-7c26-469a-92bc-d9227a20fefd\") " pod="openshift-marketplace/community-operators-ltr8x" Dec 08 22:48:52 crc kubenswrapper[4791]: I1208 22:48:52.358666 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e9de21e6-7c26-469a-92bc-d9227a20fefd-catalog-content\") pod \"community-operators-ltr8x\" (UID: \"e9de21e6-7c26-469a-92bc-d9227a20fefd\") " pod="openshift-marketplace/community-operators-ltr8x" Dec 08 22:48:52 crc kubenswrapper[4791]: I1208 22:48:52.359139 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e9de21e6-7c26-469a-92bc-d9227a20fefd-utilities\") pod \"community-operators-ltr8x\" (UID: \"e9de21e6-7c26-469a-92bc-d9227a20fefd\") " pod="openshift-marketplace/community-operators-ltr8x" Dec 08 22:48:52 crc kubenswrapper[4791]: I1208 22:48:52.359233 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e9de21e6-7c26-469a-92bc-d9227a20fefd-catalog-content\") pod \"community-operators-ltr8x\" (UID: \"e9de21e6-7c26-469a-92bc-d9227a20fefd\") " pod="openshift-marketplace/community-operators-ltr8x" Dec 08 22:48:52 crc kubenswrapper[4791]: I1208 22:48:52.376087 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bmmg4\" (UniqueName: \"kubernetes.io/projected/e9de21e6-7c26-469a-92bc-d9227a20fefd-kube-api-access-bmmg4\") pod \"community-operators-ltr8x\" (UID: \"e9de21e6-7c26-469a-92bc-d9227a20fefd\") " pod="openshift-marketplace/community-operators-ltr8x" Dec 08 22:48:52 crc kubenswrapper[4791]: I1208 22:48:52.558968 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ltr8x" Dec 08 22:48:53 crc kubenswrapper[4791]: I1208 22:48:53.222183 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ltr8x"] Dec 08 22:48:53 crc kubenswrapper[4791]: I1208 22:48:53.383509 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ltr8x" event={"ID":"e9de21e6-7c26-469a-92bc-d9227a20fefd","Type":"ContainerStarted","Data":"c039c3dae73dad8e1e0ef199e9679b893b416bc728ac615205b811683caa5438"} Dec 08 22:48:54 crc kubenswrapper[4791]: I1208 22:48:54.402897 4791 generic.go:334] "Generic (PLEG): container finished" podID="e9de21e6-7c26-469a-92bc-d9227a20fefd" containerID="9b3fb67d1874f0ebbc63e0d5ef249a73729bd23864f2140f9e88468216a9728c" exitCode=0 Dec 08 22:48:54 crc kubenswrapper[4791]: I1208 22:48:54.402967 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ltr8x" event={"ID":"e9de21e6-7c26-469a-92bc-d9227a20fefd","Type":"ContainerDied","Data":"9b3fb67d1874f0ebbc63e0d5ef249a73729bd23864f2140f9e88468216a9728c"} Dec 08 22:48:55 crc kubenswrapper[4791]: I1208 22:48:55.415969 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ltr8x" event={"ID":"e9de21e6-7c26-469a-92bc-d9227a20fefd","Type":"ContainerStarted","Data":"6ee7f7a3291d240695d81113e8fa9a59c78a839c4ceb0594c910f6c26fa976bd"} Dec 08 22:48:56 crc kubenswrapper[4791]: I1208 22:48:56.430218 4791 generic.go:334] "Generic (PLEG): container finished" podID="e9de21e6-7c26-469a-92bc-d9227a20fefd" containerID="6ee7f7a3291d240695d81113e8fa9a59c78a839c4ceb0594c910f6c26fa976bd" exitCode=0 Dec 08 22:48:56 crc kubenswrapper[4791]: I1208 22:48:56.430330 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ltr8x" event={"ID":"e9de21e6-7c26-469a-92bc-d9227a20fefd","Type":"ContainerDied","Data":"6ee7f7a3291d240695d81113e8fa9a59c78a839c4ceb0594c910f6c26fa976bd"} Dec 08 22:48:57 crc kubenswrapper[4791]: I1208 22:48:57.442118 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ltr8x" event={"ID":"e9de21e6-7c26-469a-92bc-d9227a20fefd","Type":"ContainerStarted","Data":"f3d27f0a660c7bc05f1c0867a20e7382041350c4e600c8db12b3c9ffa89f8f41"} Dec 08 22:48:57 crc kubenswrapper[4791]: I1208 22:48:57.464509 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-ltr8x" podStartSLOduration=3.044314258 podStartE2EDuration="5.4644914s" podCreationTimestamp="2025-12-08 22:48:52 +0000 UTC" firstStartedPulling="2025-12-08 22:48:54.415445638 +0000 UTC m=+5411.114203983" lastFinishedPulling="2025-12-08 22:48:56.83562277 +0000 UTC m=+5413.534381125" observedRunningTime="2025-12-08 22:48:57.463189568 +0000 UTC m=+5414.161947923" watchObservedRunningTime="2025-12-08 22:48:57.4644914 +0000 UTC m=+5414.163249745" Dec 08 22:49:02 crc kubenswrapper[4791]: I1208 22:49:02.559754 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-ltr8x" Dec 08 22:49:02 crc kubenswrapper[4791]: I1208 22:49:02.560342 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-ltr8x" Dec 08 22:49:02 crc kubenswrapper[4791]: I1208 22:49:02.605536 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/community-operators-ltr8x" Dec 08 22:49:03 crc kubenswrapper[4791]: I1208 22:49:03.543468 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-ltr8x" Dec 08 22:49:03 crc kubenswrapper[4791]: I1208 22:49:03.591346 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ltr8x"] Dec 08 22:49:05 crc kubenswrapper[4791]: I1208 22:49:05.513547 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-ltr8x" podUID="e9de21e6-7c26-469a-92bc-d9227a20fefd" containerName="registry-server" containerID="cri-o://f3d27f0a660c7bc05f1c0867a20e7382041350c4e600c8db12b3c9ffa89f8f41" gracePeriod=2 Dec 08 22:49:06 crc kubenswrapper[4791]: I1208 22:49:06.515213 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ltr8x" Dec 08 22:49:06 crc kubenswrapper[4791]: I1208 22:49:06.525501 4791 generic.go:334] "Generic (PLEG): container finished" podID="e9de21e6-7c26-469a-92bc-d9227a20fefd" containerID="f3d27f0a660c7bc05f1c0867a20e7382041350c4e600c8db12b3c9ffa89f8f41" exitCode=0 Dec 08 22:49:06 crc kubenswrapper[4791]: I1208 22:49:06.525583 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ltr8x" event={"ID":"e9de21e6-7c26-469a-92bc-d9227a20fefd","Type":"ContainerDied","Data":"f3d27f0a660c7bc05f1c0867a20e7382041350c4e600c8db12b3c9ffa89f8f41"} Dec 08 22:49:06 crc kubenswrapper[4791]: I1208 22:49:06.525944 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ltr8x" event={"ID":"e9de21e6-7c26-469a-92bc-d9227a20fefd","Type":"ContainerDied","Data":"c039c3dae73dad8e1e0ef199e9679b893b416bc728ac615205b811683caa5438"} Dec 08 22:49:06 crc kubenswrapper[4791]: I1208 22:49:06.525595 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ltr8x" Dec 08 22:49:06 crc kubenswrapper[4791]: I1208 22:49:06.525999 4791 scope.go:117] "RemoveContainer" containerID="f3d27f0a660c7bc05f1c0867a20e7382041350c4e600c8db12b3c9ffa89f8f41" Dec 08 22:49:06 crc kubenswrapper[4791]: I1208 22:49:06.569477 4791 scope.go:117] "RemoveContainer" containerID="6ee7f7a3291d240695d81113e8fa9a59c78a839c4ceb0594c910f6c26fa976bd" Dec 08 22:49:06 crc kubenswrapper[4791]: I1208 22:49:06.592059 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bmmg4\" (UniqueName: \"kubernetes.io/projected/e9de21e6-7c26-469a-92bc-d9227a20fefd-kube-api-access-bmmg4\") pod \"e9de21e6-7c26-469a-92bc-d9227a20fefd\" (UID: \"e9de21e6-7c26-469a-92bc-d9227a20fefd\") " Dec 08 22:49:06 crc kubenswrapper[4791]: I1208 22:49:06.592224 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e9de21e6-7c26-469a-92bc-d9227a20fefd-catalog-content\") pod \"e9de21e6-7c26-469a-92bc-d9227a20fefd\" (UID: \"e9de21e6-7c26-469a-92bc-d9227a20fefd\") " Dec 08 22:49:06 crc kubenswrapper[4791]: I1208 22:49:06.592346 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e9de21e6-7c26-469a-92bc-d9227a20fefd-utilities\") pod \"e9de21e6-7c26-469a-92bc-d9227a20fefd\" (UID: \"e9de21e6-7c26-469a-92bc-d9227a20fefd\") " Dec 08 22:49:06 crc kubenswrapper[4791]: I1208 22:49:06.594638 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e9de21e6-7c26-469a-92bc-d9227a20fefd-utilities" (OuterVolumeSpecName: "utilities") pod "e9de21e6-7c26-469a-92bc-d9227a20fefd" (UID: "e9de21e6-7c26-469a-92bc-d9227a20fefd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:49:06 crc kubenswrapper[4791]: I1208 22:49:06.594841 4791 scope.go:117] "RemoveContainer" containerID="9b3fb67d1874f0ebbc63e0d5ef249a73729bd23864f2140f9e88468216a9728c" Dec 08 22:49:06 crc kubenswrapper[4791]: I1208 22:49:06.603355 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9de21e6-7c26-469a-92bc-d9227a20fefd-kube-api-access-bmmg4" (OuterVolumeSpecName: "kube-api-access-bmmg4") pod "e9de21e6-7c26-469a-92bc-d9227a20fefd" (UID: "e9de21e6-7c26-469a-92bc-d9227a20fefd"). InnerVolumeSpecName "kube-api-access-bmmg4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 22:49:06 crc kubenswrapper[4791]: I1208 22:49:06.659751 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e9de21e6-7c26-469a-92bc-d9227a20fefd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e9de21e6-7c26-469a-92bc-d9227a20fefd" (UID: "e9de21e6-7c26-469a-92bc-d9227a20fefd"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:49:06 crc kubenswrapper[4791]: I1208 22:49:06.700102 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bmmg4\" (UniqueName: \"kubernetes.io/projected/e9de21e6-7c26-469a-92bc-d9227a20fefd-kube-api-access-bmmg4\") on node \"crc\" DevicePath \"\"" Dec 08 22:49:06 crc kubenswrapper[4791]: I1208 22:49:06.700142 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e9de21e6-7c26-469a-92bc-d9227a20fefd-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 22:49:06 crc kubenswrapper[4791]: I1208 22:49:06.700156 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e9de21e6-7c26-469a-92bc-d9227a20fefd-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 22:49:06 crc kubenswrapper[4791]: I1208 22:49:06.719613 4791 scope.go:117] "RemoveContainer" containerID="f3d27f0a660c7bc05f1c0867a20e7382041350c4e600c8db12b3c9ffa89f8f41" Dec 08 22:49:06 crc kubenswrapper[4791]: E1208 22:49:06.720246 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f3d27f0a660c7bc05f1c0867a20e7382041350c4e600c8db12b3c9ffa89f8f41\": container with ID starting with f3d27f0a660c7bc05f1c0867a20e7382041350c4e600c8db12b3c9ffa89f8f41 not found: ID does not exist" containerID="f3d27f0a660c7bc05f1c0867a20e7382041350c4e600c8db12b3c9ffa89f8f41" Dec 08 22:49:06 crc kubenswrapper[4791]: I1208 22:49:06.720289 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f3d27f0a660c7bc05f1c0867a20e7382041350c4e600c8db12b3c9ffa89f8f41"} err="failed to get container status \"f3d27f0a660c7bc05f1c0867a20e7382041350c4e600c8db12b3c9ffa89f8f41\": rpc error: code = NotFound desc = could not find container \"f3d27f0a660c7bc05f1c0867a20e7382041350c4e600c8db12b3c9ffa89f8f41\": container with ID starting with f3d27f0a660c7bc05f1c0867a20e7382041350c4e600c8db12b3c9ffa89f8f41 not found: ID does not exist" Dec 08 22:49:06 crc kubenswrapper[4791]: I1208 22:49:06.720318 4791 scope.go:117] "RemoveContainer" containerID="6ee7f7a3291d240695d81113e8fa9a59c78a839c4ceb0594c910f6c26fa976bd" Dec 08 22:49:06 crc kubenswrapper[4791]: E1208 22:49:06.720550 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6ee7f7a3291d240695d81113e8fa9a59c78a839c4ceb0594c910f6c26fa976bd\": container with ID starting with 6ee7f7a3291d240695d81113e8fa9a59c78a839c4ceb0594c910f6c26fa976bd not found: ID does not exist" containerID="6ee7f7a3291d240695d81113e8fa9a59c78a839c4ceb0594c910f6c26fa976bd" Dec 08 22:49:06 crc kubenswrapper[4791]: I1208 22:49:06.720583 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ee7f7a3291d240695d81113e8fa9a59c78a839c4ceb0594c910f6c26fa976bd"} err="failed to get container status \"6ee7f7a3291d240695d81113e8fa9a59c78a839c4ceb0594c910f6c26fa976bd\": rpc error: code = NotFound desc = could not find container \"6ee7f7a3291d240695d81113e8fa9a59c78a839c4ceb0594c910f6c26fa976bd\": container with ID starting with 6ee7f7a3291d240695d81113e8fa9a59c78a839c4ceb0594c910f6c26fa976bd not found: ID does not exist" Dec 08 22:49:06 crc kubenswrapper[4791]: I1208 22:49:06.720609 4791 scope.go:117] "RemoveContainer" containerID="9b3fb67d1874f0ebbc63e0d5ef249a73729bd23864f2140f9e88468216a9728c" Dec 08 22:49:06 crc 
kubenswrapper[4791]: E1208 22:49:06.720972 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9b3fb67d1874f0ebbc63e0d5ef249a73729bd23864f2140f9e88468216a9728c\": container with ID starting with 9b3fb67d1874f0ebbc63e0d5ef249a73729bd23864f2140f9e88468216a9728c not found: ID does not exist" containerID="9b3fb67d1874f0ebbc63e0d5ef249a73729bd23864f2140f9e88468216a9728c"
Dec 08 22:49:06 crc kubenswrapper[4791]: I1208 22:49:06.721008 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b3fb67d1874f0ebbc63e0d5ef249a73729bd23864f2140f9e88468216a9728c"} err="failed to get container status \"9b3fb67d1874f0ebbc63e0d5ef249a73729bd23864f2140f9e88468216a9728c\": rpc error: code = NotFound desc = could not find container \"9b3fb67d1874f0ebbc63e0d5ef249a73729bd23864f2140f9e88468216a9728c\": container with ID starting with 9b3fb67d1874f0ebbc63e0d5ef249a73729bd23864f2140f9e88468216a9728c not found: ID does not exist"
Dec 08 22:49:06 crc kubenswrapper[4791]: I1208 22:49:06.869020 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ltr8x"]
Dec 08 22:49:06 crc kubenswrapper[4791]: I1208 22:49:06.878120 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-ltr8x"]
Dec 08 22:49:07 crc kubenswrapper[4791]: I1208 22:49:07.610226 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9de21e6-7c26-469a-92bc-d9227a20fefd" path="/var/lib/kubelet/pods/e9de21e6-7c26-469a-92bc-d9227a20fefd/volumes"
Dec 08 22:49:21 crc kubenswrapper[4791]: I1208 22:49:21.700832 4791 generic.go:334] "Generic (PLEG): container finished" podID="bcd8d669-4a40-401d-af99-651b840fb48b" containerID="6382d06121b6c1ed820ae522a060a16c139c725e2fbc42627a8157cff474406b" exitCode=1
Dec 08 22:49:21 crc kubenswrapper[4791]: I1208 22:49:21.701267 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" event={"ID":"bcd8d669-4a40-401d-af99-651b840fb48b","Type":"ContainerDied","Data":"6382d06121b6c1ed820ae522a060a16c139c725e2fbc42627a8157cff474406b"}
Dec 08 22:49:21 crc kubenswrapper[4791]: I1208 22:49:21.701303 4791 scope.go:117] "RemoveContainer" containerID="d1d66e6afd76d88feb8ad14a9a90b9e7ce167c449b3742aef02e4f45a1de08c8"
Dec 08 22:49:21 crc kubenswrapper[4791]: I1208 22:49:21.702022 4791 scope.go:117] "RemoveContainer" containerID="6382d06121b6c1ed820ae522a060a16c139c725e2fbc42627a8157cff474406b"
Dec 08 22:49:21 crc kubenswrapper[4791]: E1208 22:49:21.702345 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b"
Dec 08 22:49:25 crc kubenswrapper[4791]: I1208 22:49:25.884443 4791 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm"
Dec 08 22:49:25 crc kubenswrapper[4791]: I1208 22:49:25.885048 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm"
Dec 08 22:49:25 crc kubenswrapper[4791]: I1208 22:49:25.885798 4791 scope.go:117] "RemoveContainer" containerID="6382d06121b6c1ed820ae522a060a16c139c725e2fbc42627a8157cff474406b"
Dec 08 22:49:25 crc kubenswrapper[4791]: E1208 22:49:25.886112 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b"
Dec 08 22:49:40 crc kubenswrapper[4791]: I1208 22:49:40.598328 4791 scope.go:117] "RemoveContainer" containerID="6382d06121b6c1ed820ae522a060a16c139c725e2fbc42627a8157cff474406b"
Dec 08 22:49:40 crc kubenswrapper[4791]: E1208 22:49:40.598994 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b"
Dec 08 22:49:51 crc kubenswrapper[4791]: I1208 22:49:51.598665 4791 scope.go:117] "RemoveContainer" containerID="6382d06121b6c1ed820ae522a060a16c139c725e2fbc42627a8157cff474406b"
Dec 08 22:49:51 crc kubenswrapper[4791]: E1208 22:49:51.599558 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b"
Dec 08 22:50:03 crc kubenswrapper[4791]: I1208 22:50:03.605617 4791 scope.go:117] "RemoveContainer" containerID="6382d06121b6c1ed820ae522a060a16c139c725e2fbc42627a8157cff474406b"
Dec 08 22:50:03 crc kubenswrapper[4791]: E1208 22:50:03.606412 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b"
Dec 08 22:50:15 crc kubenswrapper[4791]: I1208 22:50:15.599695 4791 scope.go:117] "RemoveContainer" containerID="6382d06121b6c1ed820ae522a060a16c139c725e2fbc42627a8157cff474406b"
Dec 08 22:50:15 crc kubenswrapper[4791]: E1208 22:50:15.600506 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b"
Dec 08 22:50:20 crc kubenswrapper[4791]: I1208 22:50:20.234332 4791 scope.go:117] "RemoveContainer" containerID="49a4ca4e97881a9ed827fa92d11bb85f5c8d698817558bc20c521937931cb5b2"
Dec 08 22:50:20 crc kubenswrapper[4791]: I1208 22:50:20.263391 4791 scope.go:117] "RemoveContainer" containerID="8b24e2fd7f6dc01bcbdc9f22b8dff0079593ce20b01840abfe8d11c03691d7f8"
Dec 08 22:50:20 crc kubenswrapper[4791]: I1208 22:50:20.318227 4791 scope.go:117] "RemoveContainer" containerID="efd65846d7636142509573d2d66aa476a8189d418a0c7288c09b076024adf1c0"
Dec 08 22:50:26 crc kubenswrapper[4791]: I1208 22:50:26.598621 4791 scope.go:117] "RemoveContainer" containerID="6382d06121b6c1ed820ae522a060a16c139c725e2fbc42627a8157cff474406b"
Dec 08 22:50:26 crc kubenswrapper[4791]: E1208 22:50:26.599448 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b"
Dec 08 22:50:35 crc kubenswrapper[4791]: I1208 22:50:35.252073 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 08 22:50:35 crc kubenswrapper[4791]: I1208 22:50:35.252580 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 08 22:50:38 crc kubenswrapper[4791]: I1208 22:50:38.597779 4791 scope.go:117] "RemoveContainer" containerID="6382d06121b6c1ed820ae522a060a16c139c725e2fbc42627a8157cff474406b"
Dec 08 22:50:38 crc kubenswrapper[4791]: E1208 22:50:38.598484 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b"
Dec 08 22:50:50 crc kubenswrapper[4791]: I1208 22:50:50.597798 4791 scope.go:117] "RemoveContainer" containerID="6382d06121b6c1ed820ae522a060a16c139c725e2fbc42627a8157cff474406b"
Dec 08 22:50:50 crc kubenswrapper[4791]: E1208 22:50:50.598697 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b"
Dec 08 22:51:04 crc kubenswrapper[4791]: I1208 22:51:04.598671 4791 scope.go:117] "RemoveContainer" containerID="6382d06121b6c1ed820ae522a060a16c139c725e2fbc42627a8157cff474406b"
Dec 08 22:51:04 crc kubenswrapper[4791]: E1208 22:51:04.599773 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b"
Dec 08 22:51:05 crc kubenswrapper[4791]: I1208 22:51:05.252328 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 08 22:51:05 crc kubenswrapper[4791]: I1208 22:51:05.252470 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 08 22:51:16 crc kubenswrapper[4791]: I1208 22:51:16.598710 4791 scope.go:117] "RemoveContainer" containerID="6382d06121b6c1ed820ae522a060a16c139c725e2fbc42627a8157cff474406b"
Dec 08 22:51:16 crc kubenswrapper[4791]: E1208 22:51:16.599590 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b"
Dec 08 22:51:28 crc kubenswrapper[4791]: I1208 22:51:28.598642 4791 scope.go:117] "RemoveContainer" containerID="6382d06121b6c1ed820ae522a060a16c139c725e2fbc42627a8157cff474406b"
Dec 08 22:51:28 crc kubenswrapper[4791]: E1208 22:51:28.599368 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b"
Dec 08 22:51:35 crc kubenswrapper[4791]: I1208 22:51:35.251320 4791 patch_prober.go:28] interesting pod/machine-config-daemon-kgd9w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 08 22:51:35 crc kubenswrapper[4791]: I1208 22:51:35.251825 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 08 22:51:35 crc kubenswrapper[4791]: I1208 22:51:35.251903 4791 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w"
Dec 08 22:51:35 crc kubenswrapper[4791]: I1208 22:51:35.252787 4791 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e4626798fa568d6a8d475120c115904a112d03ed75df80135b37cb942a2f92c8"} pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 08 22:51:35 crc kubenswrapper[4791]: I1208 22:51:35.252843 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" podUID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerName="machine-config-daemon" containerID="cri-o://e4626798fa568d6a8d475120c115904a112d03ed75df80135b37cb942a2f92c8" gracePeriod=600
Dec 08 22:51:36 crc kubenswrapper[4791]: I1208 22:51:36.119175 4791 generic.go:334] "Generic (PLEG): container finished" podID="6cdfecf8-95cf-4c2b-b98f-eb7bb055771d" containerID="e4626798fa568d6a8d475120c115904a112d03ed75df80135b37cb942a2f92c8" exitCode=0
Dec 08 22:51:36 crc kubenswrapper[4791]: I1208 22:51:36.119215 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" event={"ID":"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d","Type":"ContainerDied","Data":"e4626798fa568d6a8d475120c115904a112d03ed75df80135b37cb942a2f92c8"}
Dec 08 22:51:36 crc kubenswrapper[4791]: I1208 22:51:36.119675 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgd9w" event={"ID":"6cdfecf8-95cf-4c2b-b98f-eb7bb055771d","Type":"ContainerStarted","Data":"b6c27c81aff1c32788059bbadb3b42e3ebd9e6fb9a9d5f2311c342d01f66aca0"}
Dec 08 22:51:36 crc kubenswrapper[4791]: I1208 22:51:36.119696 4791 scope.go:117] "RemoveContainer" containerID="70973cf87c58a479cd2e1f6477c1a3ec7af9883a93b55c6f4884d75fee584abd"
Dec 08 22:51:42 crc kubenswrapper[4791]: I1208 22:51:42.599917 4791 scope.go:117] "RemoveContainer" containerID="6382d06121b6c1ed820ae522a060a16c139c725e2fbc42627a8157cff474406b"
Dec 08 22:51:42 crc kubenswrapper[4791]: E1208 22:51:42.600995 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b"
Dec 08 22:51:56 crc kubenswrapper[4791]: I1208 22:51:56.598040 4791 scope.go:117] "RemoveContainer" containerID="6382d06121b6c1ed820ae522a060a16c139c725e2fbc42627a8157cff474406b"
Dec 08 22:51:56 crc kubenswrapper[4791]: E1208 22:51:56.598737 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b"
Dec 08 22:52:07 crc kubenswrapper[4791]: I1208 22:52:07.597892 4791 scope.go:117] "RemoveContainer" containerID="6382d06121b6c1ed820ae522a060a16c139c725e2fbc42627a8157cff474406b"
Dec 08 22:52:07 crc kubenswrapper[4791]: E1208 22:52:07.598657 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b"
Dec 08 22:52:22 crc kubenswrapper[4791]: I1208 22:52:22.598699 4791 scope.go:117] "RemoveContainer" containerID="6382d06121b6c1ed820ae522a060a16c139c725e2fbc42627a8157cff474406b"
Dec 08 22:52:22 crc kubenswrapper[4791]: E1208 22:52:22.599392 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b"
Dec 08 22:52:33 crc kubenswrapper[4791]: I1208 22:52:33.611236 4791 scope.go:117] "RemoveContainer" containerID="6382d06121b6c1ed820ae522a060a16c139c725e2fbc42627a8157cff474406b"
Dec 08 22:52:33 crc kubenswrapper[4791]: E1208 22:52:33.613079 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-58wmm_openstack-operators(bcd8d669-4a40-401d-af99-651b840fb48b)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-58wmm" podUID="bcd8d669-4a40-401d-af99-651b840fb48b"
var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515115653275024457 0ustar coreroot
var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015115653276017375 5ustar coreroot
var/home/core/zuul-output/artifacts/0000755000175000017500000000000015115637754016523 5ustar corecore
var/home/core/zuul-output/docs/0000755000175000017500000000000015115637754015473 5ustar corecore